var/home/core/zuul-output/logs/kubelet.log
Dec 10 15:20:25 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 10 15:20:25 crc restorecon[4643]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 10 15:20:25 crc restorecon[4643]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc 
restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 10 15:20:25 crc 
restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 
15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 10 15:20:25 crc 
restorecon[4643]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 
15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: [repetitive run condensed: for each of the following catalogs under /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/, restorecon logged that both the catalog directory and its catalog.json were "not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13": dns-operator, dynatrace-operator, eclipse-amlen-operator, eclipse-che, ecr-secret-operator, edp-keycloak-operator, eginnovations-operator, egressip-ipam-operator, ember-csi-community-operator, etcd, eventing-kogito, external-secrets-operator, falcon-operator, fence-agents-remediation, flink-kubernetes-operator, flux, k8gb, fossul-operator, github-arc-operator, gitops-primer, gitwebhook-operator, global-load-balancer-operator, grafana-operator, group-sync-operator, hawtio-operator, hazelcast-platform-operator, hedvig-operator, hive-operator, horreum-operator, hyperfoil-bundle, ibm-block-csi-operator-community, ibm-security-verify-access-operator, ibm-spectrum-scale-csi-operator, ibmcloud-operator, infinispan, integrity-shield-operator, ipfs-operator, istio-workspace-operator, jaeger, kaoto-operator, keda, keepalived-operator, keycloak-operator, keycloak-permissions-operator, klusterlet, kogito-operator, koku-metrics-operator, konveyor-operator, korrel8r, kuadrant-operator, kube-green, kubecost, kubernetes-imagepuller-operator, kubeturbo, l5-operator, layer7-operator, lbconfig-operator, lib-bucket-provisioner, limitador-operator, logging-operator, loki-helm-operator, loki-operator, machine-deletion-remediation, mariadb-operator, marin3r, mercury-operator, microcks, mongodb-atlas-kubernetes, mongodb-operator, move2kube-operator, multi-nic-cni-operator, multicluster-global-hub-operator, multicluster-operators-subscription, must-gather-operator, namespace-configuration-operator, ncn-operator, ndmspc-operator, netobserv-operator, neuvector-community-operator, nexus-operator, nexus-operator-m88i, nfs-provisioner-operator, nlp-server, node-discovery-operator, node-healthcheck-operator, node-maintenance-operator, nsm-operator, oadp-operator, observability-operator, oci-ccm-operator, ocm-operator, odoo-operator, opendatahub-operator, openebs, openshift-nfd-operator, openshift-node-upgrade-mutex-operator, openshift-qiskit-operator, opentelemetry-operator, patch-operator, patterns-operator, pcc-operator, pelorus-operator, percona-xtradb-cluster-operator, portworx-essentials, postgresql, proactive-node-scaling-operator, project-quay, prometheus, prometheus-exporter-operator, prometurbo, pubsubplus-eventbroker-operator, pulp-operator, rabbitmq-cluster-operator, rabbitmq-messaging-topology-operator, redis-operator, reportportal-operator, resource-locker-operator, rhoas-operator, ripsaw, sailoperator, sap-commerce-operator, sap-data-intelligence-observer-operator, sap-hana-express-operator, seldon-operator, self-node-remediation, service-binding-operator, shipwright-operator, sigstore-helm-operator, silicom-sts-operator, skupper-operator, snapscheduler, snyk-operator, socmmd, sonar-operator, sosivio, sonataflow-operator, sosreport-operator, spark-helm-operator, special-resource-operator, stolostron, stolostron-engine]
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:25 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 10 15:20:26 crc restorecon[4643]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 10 15:20:26 crc kubenswrapper[4669]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 10 15:20:26 crc kubenswrapper[4669]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 10 15:20:26 crc kubenswrapper[4669]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 10 15:20:26 crc kubenswrapper[4669]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 10 15:20:26 crc kubenswrapper[4669]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 10 15:20:26 crc kubenswrapper[4669]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.214879 4669 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218288 4669 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218305 4669 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218309 4669 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218313 4669 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218317 4669 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218320 4669 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218324 4669 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218327 4669 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218331 4669 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218334 4669 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218338 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218342 4669 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218345 4669 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218349 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218353 4669 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218357 4669 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218362 4669 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218366 4669 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218370 4669 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218374 4669 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218377 4669 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218381 4669 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218384 4669 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218388 4669 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218392 4669 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218395 4669 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218399 4669 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218402 4669 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218406 4669 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218410 4669 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218415 4669 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218419 4669 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218424 4669 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218428 4669 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218431 4669 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218435 4669 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218438 4669 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218441 4669 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218445 4669 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218448 4669 feature_gate.go:330] unrecognized feature gate: Example
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218452 4669 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218455 4669 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218460 4669 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218463 4669 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218467 4669 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218470 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218474 4669 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218478 4669 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218481 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218486 4669 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218489 4669 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218493 4669 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218498 4669 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218502 4669 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218506 4669 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218510 4669 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218513 4669 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218517 4669 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218520 4669 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218524 4669 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218527 4669 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218531 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218534 4669 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218537 4669 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218541 4669 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218546 4669 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218552 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218560 4669 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218565 4669 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218569 4669 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.218574 4669 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218658 4669 flags.go:64] FLAG: --address="0.0.0.0" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218667 4669 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218676 4669 flags.go:64] FLAG: --anonymous-auth="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218682 4669 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218689 4669 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218694 4669 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218701 4669 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218708 4669 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218713 4669 flags.go:64] FLAG: 
--authorization-webhook-cache-unauthorized-ttl="30s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218718 4669 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218724 4669 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218729 4669 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218734 4669 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218740 4669 flags.go:64] FLAG: --cgroup-root="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218745 4669 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218750 4669 flags.go:64] FLAG: --client-ca-file="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218755 4669 flags.go:64] FLAG: --cloud-config="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218760 4669 flags.go:64] FLAG: --cloud-provider="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218765 4669 flags.go:64] FLAG: --cluster-dns="[]" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218770 4669 flags.go:64] FLAG: --cluster-domain="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218775 4669 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218779 4669 flags.go:64] FLAG: --config-dir="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218783 4669 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218788 4669 flags.go:64] FLAG: --container-log-max-files="5" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218794 4669 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218798 4669 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218802 4669 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218807 4669 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218811 4669 flags.go:64] FLAG: --contention-profiling="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218823 4669 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218827 4669 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218832 4669 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218836 4669 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218841 4669 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218846 4669 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218850 4669 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218856 4669 flags.go:64] FLAG: --enable-load-reader="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218861 4669 flags.go:64] FLAG: --enable-server="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 
15:20:26.218866 4669 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218872 4669 flags.go:64] FLAG: --event-burst="100" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218877 4669 flags.go:64] FLAG: --event-qps="50" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218881 4669 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218885 4669 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218889 4669 flags.go:64] FLAG: --eviction-hard="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218894 4669 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218898 4669 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218902 4669 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218907 4669 flags.go:64] FLAG: --eviction-soft="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218911 4669 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218915 4669 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218919 4669 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218923 4669 flags.go:64] FLAG: --experimental-mounter-path="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218928 4669 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218932 4669 flags.go:64] FLAG: --fail-swap-on="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218963 4669 flags.go:64] FLAG: --feature-gates="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218969 4669 flags.go:64] FLAG: --file-check-frequency="20s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218974 4669 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218978 4669 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218985 4669 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218990 4669 flags.go:64] FLAG: --healthz-port="10248" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218994 4669 flags.go:64] FLAG: --help="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.218999 4669 flags.go:64] FLAG: --hostname-override="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219004 4669 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219008 4669 flags.go:64] FLAG: --http-check-frequency="20s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219012 4669 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219017 4669 flags.go:64] FLAG: --image-credential-provider-config="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219021 4669 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219025 4669 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219029 4669 flags.go:64] 
FLAG: --image-service-endpoint="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219033 4669 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219037 4669 flags.go:64] FLAG: --kube-api-burst="100" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219042 4669 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219046 4669 flags.go:64] FLAG: --kube-api-qps="50" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219050 4669 flags.go:64] FLAG: --kube-reserved="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219054 4669 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219058 4669 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219063 4669 flags.go:64] FLAG: --kubelet-cgroups="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219067 4669 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219071 4669 flags.go:64] FLAG: --lock-file="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219075 4669 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219079 4669 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219084 4669 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219092 4669 flags.go:64] FLAG: --log-json-split-stream="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219097 4669 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219102 4669 flags.go:64] FLAG: --log-text-split-stream="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219107 4669 flags.go:64] FLAG: --logging-format="text" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219111 4669 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219117 4669 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219121 4669 flags.go:64] FLAG: --manifest-url="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219126 4669 flags.go:64] FLAG: --manifest-url-header="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219137 4669 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219142 4669 flags.go:64] FLAG: --max-open-files="1000000" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219148 4669 flags.go:64] FLAG: --max-pods="110" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219156 4669 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219161 4669 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219166 4669 flags.go:64] FLAG: --memory-manager-policy="None" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219171 4669 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219176 4669 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219181 4669 flags.go:64] 
FLAG: --node-ip="192.168.126.11" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219186 4669 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219195 4669 flags.go:64] FLAG: --node-status-max-images="50" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219201 4669 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219205 4669 flags.go:64] FLAG: --oom-score-adj="-999" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219209 4669 flags.go:64] FLAG: --pod-cidr="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219241 4669 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219250 4669 flags.go:64] FLAG: --pod-manifest-path="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219254 4669 flags.go:64] FLAG: --pod-max-pids="-1" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219258 4669 flags.go:64] FLAG: --pods-per-core="0" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219262 4669 flags.go:64] FLAG: --port="10250" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219267 4669 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219271 4669 flags.go:64] FLAG: --provider-id="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219275 4669 flags.go:64] FLAG: --qos-reserved="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219280 4669 flags.go:64] FLAG: --read-only-port="10255" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219287 4669 flags.go:64] FLAG: --register-node="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219297 4669 flags.go:64] FLAG: --register-schedulable="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219303 4669 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219312 4669 flags.go:64] FLAG: --registry-burst="10" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219317 4669 flags.go:64] FLAG: --registry-qps="5" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219322 4669 flags.go:64] FLAG: --reserved-cpus="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219327 4669 flags.go:64] FLAG: --reserved-memory="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219334 4669 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219339 4669 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219349 4669 flags.go:64] FLAG: --rotate-certificates="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219353 4669 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219358 4669 flags.go:64] FLAG: --runonce="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219362 4669 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219368 4669 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219373 4669 flags.go:64] FLAG: --seccomp-default="false" 
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219377 4669 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219381 4669 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219386 4669 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219391 4669 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219395 4669 flags.go:64] FLAG: --storage-driver-password="root" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219399 4669 flags.go:64] FLAG: --storage-driver-secure="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219403 4669 flags.go:64] FLAG: --storage-driver-table="stats" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219407 4669 flags.go:64] FLAG: --storage-driver-user="root" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219411 4669 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219417 4669 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219421 4669 flags.go:64] FLAG: --system-cgroups="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219425 4669 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219432 4669 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219436 4669 flags.go:64] FLAG: --tls-cert-file="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219440 4669 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219445 4669 flags.go:64] FLAG: --tls-min-version="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219449 4669 flags.go:64] FLAG: --tls-private-key-file="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219453 4669 flags.go:64] FLAG: --topology-manager-policy="none" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219457 4669 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219461 4669 flags.go:64] FLAG: --topology-manager-scope="container" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219465 4669 flags.go:64] FLAG: --v="2" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219471 4669 flags.go:64] FLAG: --version="false" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219477 4669 flags.go:64] FLAG: --vmodule="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219482 4669 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219486 4669 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219583 4669 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219594 4669 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219598 4669 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219602 4669 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 
15:20:26.219605 4669 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219610 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219614 4669 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219619 4669 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219624 4669 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219629 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219633 4669 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219637 4669 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219640 4669 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219645 4669 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219649 4669 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219653 4669 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219657 4669 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219660 4669 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219665 4669 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
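[editor's note] The long "flags.go:64] FLAG: --name=\"value\"" dump above is the kubelet logging the final value of every registered flag after parsing, which makes the effective command-line configuration reconstructable from the log alone. It appears to correspond to the flag-printing helper in k8s.io/component-base (hence the flags.go source location), which walks the flag set and logs each entry. A minimal sketch of that pattern, assuming pflag and using two flag names and values taken from the dump above:

    package main

    import (
        "log"

        "github.com/spf13/pflag"
    )

    // logFlags mirrors the FLAG dump: visit every registered flag and print
    // its name and final (quoted) value, whether set explicitly or defaulted.
    func logFlags(fs *pflag.FlagSet) {
        fs.VisitAll(func(f *pflag.Flag) {
            log.Printf("FLAG: --%s=%q", f.Name, f.Value.String())
        })
    }

    func main() {
        fs := pflag.NewFlagSet("kubelet-sketch", pflag.ContinueOnError)
        fs.String("node-ip", "192.168.126.11", "IP address of the node")
        fs.Int("max-pods", 110, "maximum number of pods per node")
        _ = fs.Parse(nil)
        logFlags(fs) // prints: FLAG: --max-pods="110" / FLAG: --node-ip="192.168.126.11"
    }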
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219670 4669 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219675 4669 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219678 4669 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219683 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219686 4669 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219690 4669 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219694 4669 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219697 4669 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219701 4669 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219704 4669 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219708 4669 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219711 4669 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219715 4669 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219718 4669 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219724 4669 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219727 4669 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219731 4669 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219734 4669 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219740 4669 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219744 4669 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219747 4669 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219752 4669 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219756 4669 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219760 4669 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219764 4669 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219767 4669 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219771 4669 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219774 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219778 4669 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219781 4669 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219784 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219788 4669 feature_gate.go:330] unrecognized feature gate: Example Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219792 4669 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219795 4669 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219799 4669 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219803 4669 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219806 4669 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219810 4669 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219814 4669 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219818 4669 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219821 4669 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219826 4669 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219830 4669 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219833 4669 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219837 4669 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219841 4669 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219846 4669 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219850 4669 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219853 4669 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219857 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219864 4669 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.219868 4669 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.219874 4669 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.232837 4669 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.232917 4669 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233043 4669 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233056 4669 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233065 4669 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233075 4669 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233084 4669 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233093 4669 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233101 4669 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233109 4669 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233117 4669 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233125 4669 feature_gate.go:330] unrecognized feature gate: 
VolumeGroupSnapshot Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233133 4669 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233141 4669 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233150 4669 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233158 4669 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233166 4669 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233173 4669 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233181 4669 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233189 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233201 4669 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233240 4669 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233253 4669 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233264 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233274 4669 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233283 4669 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233293 4669 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233305 4669 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233314 4669 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233323 4669 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233331 4669 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233339 4669 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233348 4669 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233359 4669 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233370 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233378 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233386 4669 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233394 4669 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233401 4669 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233409 4669 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233417 4669 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233424 4669 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233432 4669 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233440 4669 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233448 4669 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233456 4669 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233463 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233471 4669 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233480 4669 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233489 4669 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233496 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233507 4669 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233517 4669 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233525 4669 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233534 4669 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233542 4669 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233550 4669 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233557 4669 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233565 4669 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233573 4669 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233581 4669 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233589 4669 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233596 4669 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233604 4669 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233612 4669 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233621 4669 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233629 4669 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233637 4669 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233645 4669 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233653 4669 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233661 4669 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233669 4669 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233676 4669 feature_gate.go:330] unrecognized feature gate: Example Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.233689 4669 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233913 4669 feature_gate.go:330] unrecognized feature gate: UpgradeStatus 
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233926 4669 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233935 4669 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233944 4669 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233951 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233959 4669 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233967 4669 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233975 4669 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233983 4669 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.233990 4669 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234011 4669 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234020 4669 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234028 4669 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234035 4669 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234044 4669 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234051 4669 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234059 4669 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234067 4669 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234075 4669 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234083 4669 feature_gate.go:330] unrecognized feature gate: Example Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234090 4669 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234098 4669 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234106 4669 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234117 4669 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234125 4669 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234133 4669 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234142 4669 feature_gate.go:330] unrecognized feature gate: 
EtcdBackendQuota Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234150 4669 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234157 4669 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234165 4669 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234173 4669 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234181 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234190 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234198 4669 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234205 4669 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234244 4669 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234259 4669 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234272 4669 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234280 4669 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234288 4669 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234296 4669 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234304 4669 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234314 4669 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234323 4669 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234331 4669 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234339 4669 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234349 4669 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234356 4669 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234364 4669 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234372 4669 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234380 4669 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234388 4669 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234395 4669 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234403 4669 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234411 4669 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234419 4669 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234427 4669 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234435 4669 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234445 4669 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234455 4669 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234464 4669 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234472 4669 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234481 4669 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234490 4669 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234500 4669 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234509 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234518 4669 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234526 4669 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234537 4669 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234546 4669 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.234556 4669 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.234569 4669 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.235092 4669 server.go:940] "Client rotation is on, will bootstrap in background" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.239975 4669 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.240113 4669 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
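[editor's note] Each block of "unrecognized feature gate" warnings above ends in a "feature gates: {map[...]}" summary: the configured gate string is parsed into a name-to-bool map, names the binary was not compiled with draw a warning and are dropped, and the whole set is evidently applied more than once during startup, which is why the identical warning block repeats. A minimal stand-in for that parsing step (not the component-base implementation; the known-gate set here is truncated to four names taken from the summaries above, and real code errors or warns through its own registry rather than a plain map):

    package main

    import (
        "fmt"
        "strconv"
        "strings"
    )

    // known stands in for the gate registry compiled into the binary; any
    // name outside it gets the "unrecognized feature gate" warning.
    var known = map[string]bool{
        "CloudDualStackNodeIPs":                  true,
        "DisableKubeletCloudCredentialProviders": true,
        "KMSv1":                                  true,
        "ValidatingAdmissionPolicy":              true,
    }

    // setGates parses a "Name=bool,Name=bool" spec into the effective gate map.
    func setGates(spec string) map[string]bool {
        gates := map[string]bool{}
        for _, kv := range strings.Split(spec, ",") {
            name, val, ok := strings.Cut(kv, "=")
            if !ok {
                continue // malformed entry; real code reports an error here
            }
            on, err := strconv.ParseBool(val)
            if err != nil {
                continue
            }
            if !known[name] {
                fmt.Printf("unrecognized feature gate: %s\n", name)
                continue
            }
            gates[name] = on
        }
        return gates
    }

    func main() {
        // OpenShiftPodSecurityAdmission is a gate this sketch's registry does
        // not know, so it warns and is dropped, as in the log above.
        fmt.Println(setGates("CloudDualStackNodeIPs=true,OpenShiftPodSecurityAdmission=true,KMSv1=true"))
    }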
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.241075 4669 server.go:997] "Starting client certificate rotation"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.241113 4669 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.241602 4669 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-04 09:34:26.239046645 +0000 UTC
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.241714 4669 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 594h13m59.997339096s for next certificate rotation
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.251174 4669 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.254434 4669 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.265689 4669 log.go:25] "Validated CRI v1 runtime API"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.292941 4669 log.go:25] "Validated CRI v1 image API"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.294797 4669 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.297152 4669 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-10-15-14-53-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.297186 4669 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.312990 4669 manager.go:217] Machine: {Timestamp:2025-12-10 15:20:26.311575597 +0000 UTC m=+0.228522244 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2799998 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:014cfcb6-977c-4f8f-a8ab-18a9d298357b BootID:4f7987ab-4a09-457d-8b66-5542ead6568f Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:68:0a:55 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:68:0a:55 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:11:93:30 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:94:91:18 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:69:22:41 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:8b:90:98 Speed:-1 Mtu:1496} {Name:ens7.23 MacAddress:52:54:00:d5:a6:04 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:4e:03:f4:aa:5c:ce Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:d6:61:38:e8:ab:9f Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.313526 4669 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.313813 4669 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.314544 4669 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.314859 4669 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.315025 4669 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.315470 4669 topology_manager.go:138] "Creating topology manager with none policy"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.315527 4669 container_manager_linux.go:303] "Creating device plugin manager"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.315834 4669 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.315932 4669 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.316343 4669 state_mem.go:36] "Initialized new in-memory state store"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.316659 4669 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.317859 4669 kubelet.go:418] "Attempting to sync node with API server"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.317946 4669 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.318031 4669 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.318105 4669 kubelet.go:324] "Adding apiserver pod source"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.318188 4669 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.321004 4669 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.321445 4669 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.322426 4669 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323206 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323310 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323383 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323438 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323491 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323543 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323613 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323689 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323777 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.323869 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.323877 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.323972 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.324082 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.324030 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.324160 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.324101 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.324663 4669 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.325543 4669 server.go:1280] "Started kubelet"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.325923 4669 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.326082 4669 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.326514 4669 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 10 15:20:26 crc systemd[1]: Started Kubernetes Kubelet.
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.328139 4669 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.330749 4669 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.330806 4669 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.331189 4669 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 02:08:52.575328536 +0000 UTC
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.332276 4669 server.go:460] "Adding debug handlers to kubelet server"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.332380 4669 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.332391 4669 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.332579 4669 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.332239 4669 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.110:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187fe3c7d2b38866 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 15:20:26.325502054 +0000 UTC m=+0.242448681,LastTimestamp:2025-12-10 15:20:26.325502054 +0000 UTC m=+0.242448681,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.334252 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="200ms"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.334755 4669 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.335717 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.335810 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.337675 4669 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.337699 4669 factory.go:55] Registering systemd factory
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.337710 4669 factory.go:221] Registration of the systemd container factory successfully
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.338143 4669 factory.go:153] Registering CRI-O factory
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.338544 4669 factory.go:221] Registration of the crio container factory successfully
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.339964 4669 factory.go:103] Registering Raw factory
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.339999 4669 manager.go:1196] Started watching for new ooms in manager
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358359 4669 manager.go:319] Starting recovery of all containers
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358413 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358779 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358840 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358861 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358879 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358896 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358912 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358928 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358973 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.358990 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359006 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359023 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359039 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359057 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359074 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359091 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359108 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359147 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359163 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359180 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359196 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359211 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359258 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359278 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359295 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359329 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359348 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359364 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359379 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359396 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359410 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359441 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359459 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359476 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359492 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359508 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359525 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359549 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359575 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359594 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359613 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359631 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359653 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359674 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359694 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359714 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359737 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359754 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359774 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359794 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359813 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359830 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359858 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359881 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359902 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359924 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359966 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.359987 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360006 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360028 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360046 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360064 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360843 4669 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360892 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360915 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360936 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360956 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.360979 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361000 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361017 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361037 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361055 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361074 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361095 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361113 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361136 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361158 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361176 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361197 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361254 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361280 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361301 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361321 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361341 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361363 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361383 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361407 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361426 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361446 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361467 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361486 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361505 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361526 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361545 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361565 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361638 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361662 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361682 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361704 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361723 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361744 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361764 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361786 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361803 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361821 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361850 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361872 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361892 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361914 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361936 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361956 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361977 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.361998 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362016 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362036 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362056 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362076 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362095 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362115 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362133 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362151 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362206 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362256 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362277 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362296 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362314 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362333 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362353 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362372 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362389 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362406 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362424 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362441 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362462 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362482 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362498 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362516 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362537 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362555 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362572 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362590 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362610 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362630 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362648 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362666 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362684 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362703 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362720 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362739 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362758 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362779 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362798 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362817 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362834 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362852 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362872 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362891 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362908 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362928 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.362983 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363002 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363021 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363037 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363055 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363073 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363091 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363117 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363133 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363153 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363172 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363188 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363239 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363261 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363279 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363297 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363315 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363335 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363352 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363370 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363388 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363404 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363422 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363441 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363460 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363478 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363495 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363515 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363534 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363550 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363566 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363584 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" 
volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363601 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363618 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363635 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363655 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363677 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363694 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363717 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363736 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363756 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363773 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363793 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363812 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363832 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363850 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363867 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363888 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363907 4669 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363922 4669 reconstruct.go:97] "Volume reconstruction finished" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.363934 4669 reconciler.go:26] "Reconciler: start to sync state" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.382020 4669 manager.go:324] Recovery completed Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.394311 4669 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.396673 4669 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.396731 4669 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.396766 4669 kubelet.go:2335] "Starting kubelet main sync loop" Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.396820 4669 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 10 15:20:26 crc kubenswrapper[4669]: W1210 15:20:26.398576 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.398719 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.399206 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.408508 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.408573 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.408589 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.410524 4669 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.411644 4669 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.411950 4669 state_mem.go:36] "Initialized new in-memory state store" Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.432892 4669 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.465923 4669 policy_none.go:49] "None policy: Start" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.469411 4669 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.469481 4669 state_mem.go:35] "Initializing new in-memory state store" Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.497282 4669 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.516038 4669 manager.go:334] "Starting Device Plugin manager" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.516097 4669 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.516115 4669 server.go:79] "Starting device plugin registration server" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.516632 4669 eviction_manager.go:189] "Eviction manager: 
starting control loop" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.516650 4669 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.517099 4669 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.517253 4669 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.517270 4669 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.526177 4669 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.535995 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="400ms" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.618299 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.620478 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.620558 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.620574 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.620618 4669 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.621894 4669 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.698267 4669 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.698453 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.700100 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.700160 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.700175 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.700422 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.700694 4669 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.700770 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.702456 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.702588 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.702679 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.702460 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.702805 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.702820 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.703162 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.703196 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.703232 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.704206 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.704428 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.704525 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.704230 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.704826 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.704842 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.705012 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.705463 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.705492 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.706774 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.706799 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.706808 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.707026 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.707047 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.707057 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.707236 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.707418 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.707470 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.708268 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.708375 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.708449 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.709094 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.709125 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.709136 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.709352 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.709380 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.710078 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.710111 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.710120 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.770126 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.770455 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.770604 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.770730 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.770811 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.770907 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.771020 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.771102 4669 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.771208 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.771365 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.771485 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.771604 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.771705 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.771796 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.771893 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.822763 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.824907 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.825037 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:26 crc 
kubenswrapper[4669]: I1210 15:20:26.825175 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.825311 4669 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.825945 4669 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.873630 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.874287 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.874480 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.874772 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.874993 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.875199 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.875414 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.875599 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 
15:20:26.875790 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.875963 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876150 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876369 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876566 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876798 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876992 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.875741 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.873810 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.875920 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.875156 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876104 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.874712 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876323 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.875351 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876517 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.874421 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876748 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.875475 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.876947 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.874911 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: I1210 15:20:26.877128 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:20:26 crc kubenswrapper[4669]: E1210 15:20:26.937760 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="800ms"
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.033885 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.040366 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.064267 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 10 15:20:27 crc kubenswrapper[4669]: W1210 15:20:27.065788 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-0f8ab1abc8d3016ff806167102c6d6553f6b85d1eed7bef29a48fd2f750eac2f WatchSource:0}: Error finding container 0f8ab1abc8d3016ff806167102c6d6553f6b85d1eed7bef29a48fd2f750eac2f: Status 404 returned error can't find the container with id 0f8ab1abc8d3016ff806167102c6d6553f6b85d1eed7bef29a48fd2f750eac2f
Dec 10 15:20:27 crc kubenswrapper[4669]: W1210 15:20:27.071000 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-26691436aff16bc2a1da8f19c7e33f3bb0a8bb75768c93a2ff42b5d1ca604749 WatchSource:0}: Error finding container 26691436aff16bc2a1da8f19c7e33f3bb0a8bb75768c93a2ff42b5d1ca604749: Status 404 returned error can't find the container with id 26691436aff16bc2a1da8f19c7e33f3bb0a8bb75768c93a2ff42b5d1ca604749
Dec 10 15:20:27 crc kubenswrapper[4669]: W1210 15:20:27.079084 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-dcb3fe5a5af4b47424f2842798554bdcaa7e69b647497c4c7f6ca2a3bf585cb6 WatchSource:0}: Error finding container dcb3fe5a5af4b47424f2842798554bdcaa7e69b647497c4c7f6ca2a3bf585cb6: Status 404 returned error can't find the container with id dcb3fe5a5af4b47424f2842798554bdcaa7e69b647497c4c7f6ca2a3bf585cb6
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.080338 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.088349 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 15:20:27 crc kubenswrapper[4669]: W1210 15:20:27.114065 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-46138ac7b62e2cfc44cd5c6f29751144f95c0b8712f89784acac166beb0f25a9 WatchSource:0}: Error finding container 46138ac7b62e2cfc44cd5c6f29751144f95c0b8712f89784acac166beb0f25a9: Status 404 returned error can't find the container with id 46138ac7b62e2cfc44cd5c6f29751144f95c0b8712f89784acac166beb0f25a9
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.226299 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.228232 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.228274 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.228286 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.228313 4669 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 10 15:20:27 crc kubenswrapper[4669]: E1210 15:20:27.228729 4669 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc"
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.327538 4669 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.331568 4669 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 09:26:47.231551305 +0000 UTC
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.402080 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"46138ac7b62e2cfc44cd5c6f29751144f95c0b8712f89784acac166beb0f25a9"}
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.403147 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"dcb3fe5a5af4b47424f2842798554bdcaa7e69b647497c4c7f6ca2a3bf585cb6"}
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.403967 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"26691436aff16bc2a1da8f19c7e33f3bb0a8bb75768c93a2ff42b5d1ca604749"}
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.404925 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0f8ab1abc8d3016ff806167102c6d6553f6b85d1eed7bef29a48fd2f750eac2f"}
Dec 10 15:20:27 crc kubenswrapper[4669]: I1210 15:20:27.405961 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"bacd5f337fa0084bef49c0bd3dfa4ea3cc35cb2204d9b766ae68c7faa0287977"}
Dec 10 15:20:27 crc kubenswrapper[4669]: W1210 15:20:27.465297 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:27 crc kubenswrapper[4669]: E1210 15:20:27.465408 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Dec 10 15:20:27 crc kubenswrapper[4669]: W1210 15:20:27.588037 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:27 crc kubenswrapper[4669]: E1210 15:20:27.588198 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Dec 10 15:20:27 crc kubenswrapper[4669]: E1210 15:20:27.691996 4669 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.110:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187fe3c7d2b38866 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 15:20:26.325502054 +0000 UTC m=+0.242448681,LastTimestamp:2025-12-10 15:20:26.325502054 +0000 UTC m=+0.242448681,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 10 15:20:27 crc kubenswrapper[4669]: W1210 15:20:27.729790 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:27 crc kubenswrapper[4669]: E1210 15:20:27.729900 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Dec 10 15:20:27 crc kubenswrapper[4669]: E1210 15:20:27.738568 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="1.6s"
Dec 10 15:20:27 crc kubenswrapper[4669]: W1210 15:20:27.795977 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:27 crc kubenswrapper[4669]: E1210 15:20:27.796091 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.110:6443: connect: connection refused" logger="UnhandledError"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.029342 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.031078 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.031130 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.031142 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.031174 4669 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 10 15:20:28 crc kubenswrapper[4669]: E1210 15:20:28.031683 4669 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.110:6443: connect: connection refused" node="crc"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.327682 4669 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.110:6443: connect: connection refused
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.331691 4669 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 20:24:34.56910393 +0000 UTC
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.331749 4669 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 845h4m6.237357786s for next certificate rotation
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.412578 4669 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04" exitCode=0
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.412679 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.412672 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04"}
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.413869 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.413904 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.413919 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.417851 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9"}
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.417915 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455"}
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.417943 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4"}
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.417961 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5"}
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.418096 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.420711 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.420761 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.420777 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.422970 4669 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871" exitCode=0
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.423054 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871"}
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.423254 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.424443 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.424484 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.424501 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.432411 4669 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70" exitCode=0
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.432521 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70"}
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.432638 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.434486 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.434539 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.434562 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.437693 4669 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8" exitCode=0
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.437743 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8"}
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.437883 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.439163 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.439196 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.439208 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.452811 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.454174 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.454246 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:28 crc kubenswrapper[4669]: I1210 15:20:28.454263 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
event="NodeHasSufficientPID" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.444528 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"88908b68f1b6c8e8ee6cf65e57077640c6cf07c6f975a1b5c4d6e5b11c7602e1"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.444610 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c5248cf34928a68839c3f4da115b0009ef1b4f6fc313018a6f60344c2ee8f5ef"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.444631 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5e6d75bf5ff884fec59058a141854a7f9222a8337f86b56098851e30a0f4d6d8"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.444709 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.446087 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.446125 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.446141 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.446273 4669 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46" exitCode=0 Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.446389 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.446532 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.447622 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.447647 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.447673 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.449720 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"4ed05214809a23e10476b1d4cff9928185e996ae5fbf9b87237f9b2cee3afb1d"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.449739 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.451204 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.451261 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.451273 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.453087 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.453141 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.453619 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.453655 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.453669 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.453682 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.453692 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682"} Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.454044 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.454079 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.454089 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.454760 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.454778 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.454786 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.632181 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.637671 4669 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.637710 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.637726 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:29 crc kubenswrapper[4669]: I1210 15:20:29.637753 4669 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.276123 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.281789 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.458616 4669 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7" exitCode=0 Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.458766 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.458790 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7"} Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.458771 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.458865 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.458904 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.458902 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.459021 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.459039 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.460329 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.460374 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.460392 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.461142 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.461191 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:30 crc kubenswrapper[4669]: 
I1210 15:20:30.461208 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.461145 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.461287 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.461305 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.461534 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.461560 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.461569 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.462518 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.462554 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:30 crc kubenswrapper[4669]: I1210 15:20:30.462569 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:31 crc kubenswrapper[4669]: I1210 15:20:31.464147 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 15:20:31 crc kubenswrapper[4669]: I1210 15:20:31.464191 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:31 crc kubenswrapper[4669]: I1210 15:20:31.464639 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222"} Dec 10 15:20:31 crc kubenswrapper[4669]: I1210 15:20:31.464666 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc"} Dec 10 15:20:31 crc kubenswrapper[4669]: I1210 15:20:31.464903 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:31 crc kubenswrapper[4669]: I1210 15:20:31.464921 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:31 crc kubenswrapper[4669]: I1210 15:20:31.464927 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.042838 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.043188 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.044658 4669 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.044730 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.044747 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.468690 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.471919 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095"} Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.471958 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8"} Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.471972 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197"} Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.471982 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.472024 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.472047 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.473145 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.473174 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.473184 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.473253 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.473284 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.473301 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.730920 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.731158 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.731202 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 
15:20:32.732636 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.732677 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:32 crc kubenswrapper[4669]: I1210 15:20:32.732689 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:33 crc kubenswrapper[4669]: I1210 15:20:33.474697 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:33 crc kubenswrapper[4669]: I1210 15:20:33.475908 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:33 crc kubenswrapper[4669]: I1210 15:20:33.475970 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:33 crc kubenswrapper[4669]: I1210 15:20:33.475992 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:33 crc kubenswrapper[4669]: I1210 15:20:33.982879 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:33 crc kubenswrapper[4669]: I1210 15:20:33.983069 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:33 crc kubenswrapper[4669]: I1210 15:20:33.984116 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:33 crc kubenswrapper[4669]: I1210 15:20:33.984152 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:33 crc kubenswrapper[4669]: I1210 15:20:33.984167 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.298478 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.298747 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.298818 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.300597 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.300645 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.300660 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.317482 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.478160 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.479636 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 
15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.479711 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:34 crc kubenswrapper[4669]: I1210 15:20:34.479735 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.430258 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.430452 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.432091 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.432162 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.432195 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.437023 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.437281 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.438623 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.438658 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.438667 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.469155 4669 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.469243 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.763962 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.764321 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.766434 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.766498 4669 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:35 crc kubenswrapper[4669]: I1210 15:20:35.766513 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:36 crc kubenswrapper[4669]: E1210 15:20:36.526723 4669 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 10 15:20:39 crc kubenswrapper[4669]: I1210 15:20:39.327805 4669 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 10 15:20:39 crc kubenswrapper[4669]: E1210 15:20:39.340446 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 10 15:20:39 crc kubenswrapper[4669]: W1210 15:20:39.611202 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 10 15:20:39 crc kubenswrapper[4669]: I1210 15:20:39.611342 4669 trace.go:236] Trace[421945567]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 15:20:29.609) (total time: 10001ms): Dec 10 15:20:39 crc kubenswrapper[4669]: Trace[421945567]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (15:20:39.611) Dec 10 15:20:39 crc kubenswrapper[4669]: Trace[421945567]: [10.001407436s] [10.001407436s] END Dec 10 15:20:39 crc kubenswrapper[4669]: E1210 15:20:39.611372 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 10 15:20:39 crc kubenswrapper[4669]: E1210 15:20:39.639174 4669 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Dec 10 15:20:39 crc kubenswrapper[4669]: W1210 15:20:39.730626 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 10 15:20:39 crc kubenswrapper[4669]: I1210 15:20:39.730763 4669 trace.go:236] Trace[85605428]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 15:20:29.728) (total time: 10002ms): Dec 10 15:20:39 crc kubenswrapper[4669]: Trace[85605428]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (15:20:39.730) Dec 10 15:20:39 crc kubenswrapper[4669]: Trace[85605428]: [10.002380449s] [10.002380449s] END Dec 10 15:20:39 crc kubenswrapper[4669]: E1210 15:20:39.730806 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed 
to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 10 15:20:39 crc kubenswrapper[4669]: W1210 15:20:39.797257 4669 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Dec 10 15:20:39 crc kubenswrapper[4669]: I1210 15:20:39.797416 4669 trace.go:236] Trace[2074651941]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 15:20:29.795) (total time: 10002ms): Dec 10 15:20:39 crc kubenswrapper[4669]: Trace[2074651941]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10002ms (15:20:39.797) Dec 10 15:20:39 crc kubenswrapper[4669]: Trace[2074651941]: [10.00231695s] [10.00231695s] END Dec 10 15:20:39 crc kubenswrapper[4669]: E1210 15:20:39.797458 4669 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 10 15:20:40 crc kubenswrapper[4669]: I1210 15:20:40.334456 4669 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 10 15:20:40 crc kubenswrapper[4669]: I1210 15:20:40.334534 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 10 15:20:40 crc kubenswrapper[4669]: I1210 15:20:40.343283 4669 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\": RBAC: [clusterrole.rbac.authorization.k8s.io \"system:openshift:public-info-viewer\" not found, clusterrole.rbac.authorization.k8s.io \"system:public-info-viewer\" not found]","reason":"Forbidden","details":{},"code":403} Dec 10 15:20:40 crc kubenswrapper[4669]: I1210 15:20:40.343361 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 10 15:20:42 crc kubenswrapper[4669]: I1210 15:20:42.839956 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:42 crc kubenswrapper[4669]: I1210 15:20:42.841731 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:42 crc kubenswrapper[4669]: I1210 15:20:42.841816 4669 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:42 crc kubenswrapper[4669]: I1210 15:20:42.841835 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:42 crc kubenswrapper[4669]: I1210 15:20:42.841878 4669 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 15:20:42 crc kubenswrapper[4669]: E1210 15:20:42.845894 4669 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 10 15:20:43 crc kubenswrapper[4669]: I1210 15:20:43.689567 4669 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 10 15:20:43 crc kubenswrapper[4669]: I1210 15:20:43.988234 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.307467 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.312984 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.330122 4669 apiserver.go:52] "Watching apiserver" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.333064 4669 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.333510 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"] Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.333886 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:44 crc kubenswrapper[4669]: E1210 15:20:44.333956 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.333991 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.334169 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.334190 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:44 crc kubenswrapper[4669]: E1210 15:20:44.334253 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.334270 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.334271 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:44 crc kubenswrapper[4669]: E1210 15:20:44.334396 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.335707 4669 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.336926 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.336945 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.336958 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.337953 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.338075 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.338102 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.338121 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.338698 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.338698 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.362950 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.397613 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.412694 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.430730 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.444768 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.459182 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.473086 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resourc
e-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.483857 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:44 crc kubenswrapper[4669]: I1210 15:20:44.497122 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:44 crc kubenswrapper[4669]: E1210 15:20:44.577541 4669 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.312868 4669 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.312927 4669 trace.go:236] Trace[535605469]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (10-Dec-2025 15:20:30.404) (total time: 14908ms): Dec 10 15:20:45 crc kubenswrapper[4669]: Trace[535605469]: ---"Objects listed" error: 14908ms (15:20:45.312) Dec 10 15:20:45 crc kubenswrapper[4669]: Trace[535605469]: [14.9083137s] [14.9083137s] END Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.312968 4669 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.413591 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.413644 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.413693 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.413716 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.414011 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.414026 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.414082 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.414989 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.415016 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.414991 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.415053 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.415085 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.415378 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.415238 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.415322 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.415988 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416041 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416098 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416335 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416401 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416131 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416483 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416728 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416770 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416506 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416840 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.416863 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.417062 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). 
InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.417123 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.417149 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.417424 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.418118 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.418184 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.418505 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428194 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428342 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428371 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428394 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428413 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428434 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428455 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428505 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428547 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428569 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428589 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428613 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428633 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428654 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428676 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428698 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428719 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428843 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.428867 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.429074 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.429136 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.429264 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.429435 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.429453 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.429845 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430019 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430210 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430212 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430213 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430281 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430306 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430322 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430341 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430359 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430375 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430390 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430407 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: 
\"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430425 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430441 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430459 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430476 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430493 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430509 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430525 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430542 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430559 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430574 4669 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430588 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430604 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430619 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430683 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430702 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430718 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430733 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430752 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430769 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430783 
4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430797 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430813 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430833 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430847 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430861 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430876 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430893 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430907 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430925 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430939 4669 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430955 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430970 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430985 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431005 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431020 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431036 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431053 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431068 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431083 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: 
\"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431098 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431114 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431129 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431143 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431157 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431171 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431187 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431238 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431256 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431270 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" 
(UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431285 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431302 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431318 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431332 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431347 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431363 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431379 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431393 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431408 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431423 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431437 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431453 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431471 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431485 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431500 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431515 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431531 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431549 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431565 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431580 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431595 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431613 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431628 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431645 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431659 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431674 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431690 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431705 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431720 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431734 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431751 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431766 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431781 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431796 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431812 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431828 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431843 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431869 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431885 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431901 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" 
(UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431916 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431979 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431995 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432010 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432026 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432041 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432056 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432071 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432086 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432102 4669 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432118 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432133 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432148 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432165 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432180 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432198 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432657 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432676 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432695 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 
15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432710 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432727 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432745 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432760 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432776 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432792 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432808 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432827 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432843 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432860 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 10 
15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432877 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432895 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432911 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432928 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432945 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432961 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432977 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432994 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433011 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433026 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: 
\"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433043 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433059 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433076 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433093 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433110 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433128 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433144 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433160 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433178 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433194 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433225 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433243 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433258 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433275 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433293 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433311 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433329 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433346 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433363 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433402 4669 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433419 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433455 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433483 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433503 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433522 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433542 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433562 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433583 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433602 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433621 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433640 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433657 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433677 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433695 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433716 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433765 4669 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433778 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc 
kubenswrapper[4669]: I1210 15:20:45.433789 4669 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433799 4669 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433808 4669 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433821 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433834 4669 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433846 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433860 4669 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433872 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433885 4669 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433898 4669 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433911 4669 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433923 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433936 4669 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433949 4669 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433959 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433970 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433980 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433993 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434006 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434020 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434033 4669 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434045 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434055 4669 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434906 4669 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430456 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.430526 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.431012 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.432853 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433047 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433271 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433313 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433465 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433647 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433752 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.433838 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434016 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434199 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434552 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434617 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.434914 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.435112 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.435309 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.435505 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.436371 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.441412 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.441369 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.441954 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.442384 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.442883 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.443065 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.444204 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.445327 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.445461 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.445661 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.445865 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.446083 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.446354 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.446510 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.446747 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.446809 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.446820 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.447175 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.447526 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.447617 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.447770 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.448658 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.448928 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.448959 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.448992 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.449165 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.449184 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.449324 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.449452 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.449465 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.449885 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.449878 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.450189 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.450210 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.450851 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.452429 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.452650 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.452760 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.452830 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.453095 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.453189 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.453441 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.453488 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.453556 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.453766 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.454032 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.454373 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.454660 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.454949 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.454970 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.455639 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.455648 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.455912 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.456271 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.456415 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.456665 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.456941 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.457284 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.457412 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.457733 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.458515 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.458569 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.459018 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.459141 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.459661 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.460323 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.460700 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.461013 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.461041 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.461443 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.461807 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.462114 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.462338 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.462376 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.462554 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.462921 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.462943 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.463098 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.463259 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.463360 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.463606 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.463766 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.463894 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.464150 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.464319 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.464344 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.464778 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.464868 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.459314 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.465313 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.465306 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.465497 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:20:45.965465417 +0000 UTC m=+19.882412244 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.465547 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.465658 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.465699 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.465810 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.465832 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.465957 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.465989 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.466030 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.466174 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.466306 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.466441 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.466577 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.466759 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.453994 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.466950 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.467117 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.467135 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.467183 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.467347 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.467423 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.467560 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.467731 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.467978 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.467981 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.468167 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.468338 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.468422 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.469955 4669 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.470017 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.470357 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.473671 4669 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.473771 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" 
failed. No retries permitted until 2025-12-10 15:20:45.973749294 +0000 UTC m=+19.890695921 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.474410 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.476649 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.476910 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.477065 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.477257 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.479862 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.552394 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.552598 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.552804 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.552959 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553096 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553142 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553168 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.553277 4669 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553330 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.553338 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:46.05332151 +0000 UTC m=+19.970268137 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553693 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553758 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553774 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553788 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553982 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553864 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.554055 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.554199 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.554264 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.554398 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.553638 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.554602 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.554634 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.554682 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.554745 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.555060 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.555183 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.556062 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.559406 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.567413 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.572112 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.574089 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.574159 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.574186 4669 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.574322 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 10 15:20:45 crc kubenswrapper[4669]: W1210 15:20:45.574491 4669 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.574520 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.574699 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:46.074670618 +0000 UTC m=+19.991617445 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.574823 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.574892 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.574912 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.575064 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.575211 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.575268 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.575443 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.581856 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.581879 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.581900 4669 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.581932 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node 
\"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.581954 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.581989 4669 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582009 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582037 4669 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582054 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582071 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582088 4669 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582114 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582133 4669 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582154 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582183 4669 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582203 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582248 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 10 
15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582266 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582293 4669 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582311 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582328 4669 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582346 4669 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582374 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582397 4669 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582421 4669 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582423 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582451 4669 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582492 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.582429 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.582543 4669 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.582559 4669 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:45 crc kubenswrapper[4669]: E1210 15:20:45.582614 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:46.082595707 +0000 UTC m=+19.999542334 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582515 4669 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582727 4669 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582739 4669 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582750 4669 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582763 4669 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582787 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582797 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582807 4669 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582820 4669 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582829 4669 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582838 4669 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582848 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582860 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582870 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582880 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582891 4669 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582901 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582909 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582919 4669 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582931 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582941 4669 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582950 4669 reconciler_common.go:293] "Volume 
detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582959 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582972 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582982 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.582991 4669 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583002 4669 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583015 4669 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583024 4669 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583034 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583048 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583058 4669 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583070 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583079 4669 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583093 4669 
reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583102 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583112 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583122 4669 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583134 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583146 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583156 4669 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583165 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583176 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583186 4669 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583196 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583208 4669 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583231 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 
15:20:45.583241 4669 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583252 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583263 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583273 4669 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583282 4669 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583293 4669 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583307 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583317 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583326 4669 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.583338 4669 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584362 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584377 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584386 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584399 4669 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584408 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584417 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584427 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584441 4669 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584450 4669 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584483 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584492 4669 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584503 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584511 4669 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584520 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584531 4669 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584562 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584572 4669 reconciler_common.go:293] "Volume detached for volume 
\"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584579 4669 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584611 4669 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584620 4669 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584629 4669 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584638 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584683 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584693 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584702 4669 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584714 4669 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584724 4669 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584735 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584746 4669 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584761 4669 reconciler_common.go:293] "Volume detached for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584771 4669 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584782 4669 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584793 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584805 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584813 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584822 4669 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584853 4669 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584865 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584875 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584884 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584896 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584905 4669 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584914 4669 reconciler_common.go:293] "Volume detached for volume 
\"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584923 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584936 4669 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584945 4669 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584953 4669 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584962 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584973 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584982 4669 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.584990 4669 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585025 4669 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585035 4669 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585152 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585235 4669 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585250 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585264 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585273 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585315 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585329 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585339 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585347 4669 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585355 4669 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585367 4669 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585377 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585386 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585398 4669 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585408 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585420 4669 
reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585429 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585440 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585449 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585458 4669 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585467 4669 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585482 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585494 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585504 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585515 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585530 4669 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585541 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.585591 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 
15:20:45.585605 4669 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.589258 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.591472 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.600504 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.622947 4669 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.650459 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.677830 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.691292 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.701725 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.746475 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.768708 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.833636 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":
{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\
\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.845555 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.851614 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.853389 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.859724 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.866232 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 10 15:20:45 crc kubenswrapper[4669]: W1210 15:20:45.869823 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-8f67e24d3a0f6ab9fe43b854783ccafe850e7e869451b43735e70342280c34e4 WatchSource:0}: Error finding container 8f67e24d3a0f6ab9fe43b854783ccafe850e7e869451b43735e70342280c34e4: Status 404 returned error can't find the container with id 8f67e24d3a0f6ab9fe43b854783ccafe850e7e869451b43735e70342280c34e4 Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.873685 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: W1210 15:20:45.886829 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-f2d0b9dd25f7bd0812a4c444b4a13c533aba3c262766211ce44fe1b5ffe9bddd WatchSource:0}: Error finding container f2d0b9dd25f7bd0812a4c444b4a13c533aba3c262766211ce44fe1b5ffe9bddd: Status 404 returned error can't find the container with id f2d0b9dd25f7bd0812a4c444b4a13c533aba3c262766211ce44fe1b5ffe9bddd Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.895451 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.909821 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.912439 4669 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.915208 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.930934 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.946841 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.974528 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.984694 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:45 crc kubenswrapper[4669]: I1210 15:20:45.996660 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.011065 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.022179 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.031047 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.034826 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.034915 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.034979 4669 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.035020 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:47.035008506 +0000 UTC m=+20.951955133 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.035080 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:20:47.035062337 +0000 UTC m=+20.952008964 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.048363 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779
036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.068321 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.096635 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.121754 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.135270 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.135313 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.135332 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135449 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135465 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135479 4669 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135527 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-10 15:20:47.135512939 +0000 UTC m=+21.052459566 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135532 4669 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135578 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135590 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135598 4669 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135624 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:47.135616403 +0000 UTC m=+21.052563030 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.135643 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:47.135633923 +0000 UTC m=+21.052580550 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.151786 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.179588 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":
{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\
\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.191673 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.215483 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.406752 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.406752 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.406876 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.406767 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.406958 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:20:46 crc kubenswrapper[4669]: E1210 15:20:46.407092 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.410877 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.411743 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.412536 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.413947 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.414695 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.415670 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.416421 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.416975 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.417982 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.418474 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.419441 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.420323 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.421235 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.421716 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.424250 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.424767 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.425332 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.426054 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.426608 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.427160 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.427971 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.428556 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.429958 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.430740 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.431113 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.432317 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.433264 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.433911 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.434545 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.435094 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.435903 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.436371 4669 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.436460 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.438474 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.438987 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.439670 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.441558 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.442753 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.443450 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.444734 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.445516 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.446462 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.447118 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.448433 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.449594 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.450173 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.450761 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.451341 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.452128 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.452881 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.453392 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.453821 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.457087 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.457650 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.458185 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.469374 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.495774 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.547482 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.586199 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.591989 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749"} Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.592312 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f2d0b9dd25f7bd0812a4c444b4a13c533aba3c262766211ce44fe1b5ffe9bddd"} Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.595245 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8f67e24d3a0f6ab9fe43b854783ccafe850e7e869451b43735e70342280c34e4"} Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.597293 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2"} Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.597345 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9"} Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.597357 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"d7aaea86d5cc762201818fad960e1fa4fe15f483f0a50227854e60d493972b54"} Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.610992 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.636650 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-
apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.652266 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.693590 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.709389 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started
\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phas
e\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.723785 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io
/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.736228 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.755359 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.780620 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025
-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c5
1871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.807175 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.822487 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.833459 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{
\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.852555 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.982547 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-vck4f"] Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.982905 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-vck4f" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.989284 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.989364 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 10 15:20:46 crc kubenswrapper[4669]: I1210 15:20:46.994862 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.047530 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.047616 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/11cdb984-c1c3-4762-a527-8f0243733219-hosts-file\") pod \"node-resolver-vck4f\" (UID: \"11cdb984-c1c3-4762-a527-8f0243733219\") " pod="openshift-dns/node-resolver-vck4f" Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.047689 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:20:49.047658541 +0000 UTC m=+22.964605168 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.047795 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.047826 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbxbd\" (UniqueName: \"kubernetes.io/projected/11cdb984-c1c3-4762-a527-8f0243733219-kube-api-access-tbxbd\") pod \"node-resolver-vck4f\" (UID: \"11cdb984-c1c3-4762-a527-8f0243733219\") " pod="openshift-dns/node-resolver-vck4f" Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.047916 4669 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.047950 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-10 15:20:49.047943658 +0000 UTC m=+22.964890285 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.084770 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c9871
17ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 
15:20:47.148596 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/11cdb984-c1c3-4762-a527-8f0243733219-hosts-file\") pod \"node-resolver-vck4f\" (UID: \"11cdb984-c1c3-4762-a527-8f0243733219\") " pod="openshift-dns/node-resolver-vck4f" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.148647 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.148670 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.148690 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.148747 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbxbd\" (UniqueName: \"kubernetes.io/projected/11cdb984-c1c3-4762-a527-8f0243733219-kube-api-access-tbxbd\") pod \"node-resolver-vck4f\" (UID: \"11cdb984-c1c3-4762-a527-8f0243733219\") " pod="openshift-dns/node-resolver-vck4f" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.148821 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/11cdb984-c1c3-4762-a527-8f0243733219-hosts-file\") pod \"node-resolver-vck4f\" (UID: \"11cdb984-c1c3-4762-a527-8f0243733219\") " pod="openshift-dns/node-resolver-vck4f" Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149009 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149031 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149042 4669 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149091 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-10 15:20:49.149069517 +0000 UTC m=+23.066016144 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149132 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149150 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149161 4669 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149202 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:49.14918922 +0000 UTC m=+23.066135847 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149267 4669 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:47 crc kubenswrapper[4669]: E1210 15:20:47.149293 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:49.149286783 +0000 UTC m=+23.066233410 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.192856 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbxbd\" (UniqueName: \"kubernetes.io/projected/11cdb984-c1c3-4762-a527-8f0243733219-kube-api-access-tbxbd\") pod \"node-resolver-vck4f\" (UID: \"11cdb984-c1c3-4762-a527-8f0243733219\") " pod="openshift-dns/node-resolver-vck4f" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.200400 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.226440 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.248769 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.264542 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.279092 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.298419 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.300520 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-vck4f" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.343316 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.368508 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.394594 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.622119 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vck4f" event={"ID":"11cdb984-c1c3-4762-a527-8f0243733219","Type":"ContainerStarted","Data":"f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581"} Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.622178 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-vck4f" event={"ID":"11cdb984-c1c3-4762-a527-8f0243733219","Type":"ContainerStarted","Data":"c274059841173552eef789c2a758d1c42ed478a4c34de811e17866f8db9d8b9b"} Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.846552 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6hbdc"] Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.847248 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.848203 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-5tqlx"] Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.848634 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.853395 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.853655 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-s4g62"] Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.853680 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.853811 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.854019 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-s4g62" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.854093 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.854310 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.854454 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.855115 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.855268 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.855393 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.861800 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb6
8e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":
{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:47Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.866710 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.870110 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.881523 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.881865 4669 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"cni-copy-resources" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.881975 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.883032 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.883535 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-zqf8t"] Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.884274 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.884389 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.915631 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.915790 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.915961 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955552 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-ovn\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955590 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-etc-kubernetes\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955608 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-etc-openvswitch\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955625 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7cvw\" (UniqueName: \"kubernetes.io/projected/ce60e1d4-6433-477d-89be-6ff9354dd0a4-kube-api-access-s7cvw\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955650 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-log-socket\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:47 crc 
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955665 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-socket-dir-parent\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955679 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-kubelet\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955693 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-systemd-units\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955707 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ce60e1d4-6433-477d-89be-6ff9354dd0a4-proxy-tls\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955720 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2d099499-bf36-4f4f-a556-47f9351394d3-cni-binary-copy\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955734 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-netns\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955747 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-script-lib\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955760 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-var-lib-cni-multus\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955777 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-run-multus-certs\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955791 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-openvswitch\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955803 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ce60e1d4-6433-477d-89be-6ff9354dd0a4-mcd-auth-proxy-config\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955818 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-run-netns\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955835 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-systemd\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955850 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-system-cni-dir\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955866 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-hostroot\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955879 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2d099499-bf36-4f4f-a556-47f9351394d3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955893 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-bin\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955907 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-run-k8s-cni-cncf-io\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955919 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-var-lib-kubelet\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955933 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-slash\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955958 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-cni-dir\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955973 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-conf-dir\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.955987 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhmnr\" (UniqueName: \"kubernetes.io/projected/2d099499-bf36-4f4f-a556-47f9351394d3-kube-api-access-vhmnr\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956005 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-var-lib-openvswitch\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956019 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-cnibin\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956033 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-cni-binary-copy\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956048 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-system-cni-dir\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956061 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-cnibin\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956078 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/ce60e1d4-6433-477d-89be-6ff9354dd0a4-rootfs\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956104 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-config\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956117 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-env-overrides\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956133 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-os-release\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956145 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-var-lib-cni-bin\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956160 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-os-release\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t"
Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956174 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t"
15:20:47.956194 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-ovn-kubernetes\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956210 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-netd\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956246 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-daemon-config\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956260 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovn-node-metrics-cert\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956274 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88p9n\" (UniqueName: \"kubernetes.io/projected/8dc35dac-41a2-4bc1-ad26-5f515126921e-kube-api-access-88p9n\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956289 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dr8hl\" (UniqueName: \"kubernetes.io/projected/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-kube-api-access-dr8hl\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956303 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-node-log\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:47 crc kubenswrapper[4669]: I1210 15:20:47.956321 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061532 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-script-lib\") pod \"ovnkube-node-6hbdc\" (UID: 
\"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061571 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2d099499-bf36-4f4f-a556-47f9351394d3-cni-binary-copy\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061588 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-netns\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061606 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-openvswitch\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061622 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ce60e1d4-6433-477d-89be-6ff9354dd0a4-mcd-auth-proxy-config\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061639 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-var-lib-cni-multus\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061663 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-run-multus-certs\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061681 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-run-netns\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061696 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-systemd\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061710 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-system-cni-dir\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " 
pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061727 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-hostroot\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061742 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2d099499-bf36-4f4f-a556-47f9351394d3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061759 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-bin\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061775 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-run-k8s-cni-cncf-io\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061791 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-var-lib-kubelet\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061773 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-netns\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061850 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-openvswitch\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061834 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-slash\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061807 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-slash\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061912 4669 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-cni-dir\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061928 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-conf-dir\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061946 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhmnr\" (UniqueName: \"kubernetes.io/projected/2d099499-bf36-4f4f-a556-47f9351394d3-kube-api-access-vhmnr\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061964 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-var-lib-openvswitch\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061978 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-cnibin\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.061993 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-cni-binary-copy\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062008 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-system-cni-dir\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062024 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-cnibin\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062052 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-config\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062067 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-env-overrides\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062084 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/ce60e1d4-6433-477d-89be-6ff9354dd0a4-rootfs\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062099 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-var-lib-cni-bin\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062116 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-os-release\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062130 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062146 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-os-release\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062171 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-ovn-kubernetes\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062185 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-netd\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062199 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88p9n\" (UniqueName: \"kubernetes.io/projected/8dc35dac-41a2-4bc1-ad26-5f515126921e-kube-api-access-88p9n\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062210 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-script-lib\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062738 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-daemon-config\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062228 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-daemon-config\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062775 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovn-node-metrics-cert\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062794 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dr8hl\" (UniqueName: \"kubernetes.io/projected/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-kube-api-access-dr8hl\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062805 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ce60e1d4-6433-477d-89be-6ff9354dd0a4-mcd-auth-proxy-config\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062844 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-node-log\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062810 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-node-log\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062871 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062889 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-ovn\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062906 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-etc-kubernetes\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062888 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062942 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-etc-openvswitch\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062963 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-var-lib-cni-multus\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062983 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-run-multus-certs\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062921 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-etc-openvswitch\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063016 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-systemd\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063048 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7cvw\" (UniqueName: \"kubernetes.io/projected/ce60e1d4-6433-477d-89be-6ff9354dd0a4-kube-api-access-s7cvw\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063067 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-hostroot\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " 
pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063074 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-log-socket\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063109 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-kubelet\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063135 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-systemd-units\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063153 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ce60e1d4-6433-477d-89be-6ff9354dd0a4-proxy-tls\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063169 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-socket-dir-parent\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063298 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-socket-dir-parent\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063002 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-run-netns\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063391 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-env-overrides\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063425 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/ce60e1d4-6433-477d-89be-6ff9354dd0a4-rootfs\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063448 4669 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-var-lib-cni-bin\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063052 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-system-cni-dir\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063579 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-log-socket\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063594 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-config\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063611 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-kubelet\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063642 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-systemd-units\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063642 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-os-release\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063902 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-bin\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063931 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-run-k8s-cni-cncf-io\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.063952 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-host-var-lib-kubelet\") pod \"multus-s4g62\" (UID: 
\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064096 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-cni-dir\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064116 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-multus-conf-dir\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064165 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-cni-binary-copy\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064200 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-var-lib-openvswitch\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064248 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-cnibin\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064272 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-system-cni-dir\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064296 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-ovn-kubernetes\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064311 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-tuning-conf-dir\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064328 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-os-release\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.062803 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/2d099499-bf36-4f4f-a556-47f9351394d3-cni-binary-copy\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064351 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064365 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-netd\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064377 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-ovn\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064385 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/2d099499-bf36-4f4f-a556-47f9351394d3-cnibin\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064403 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-etc-kubernetes\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.064718 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/2d099499-bf36-4f4f-a556-47f9351394d3-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.069748 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ce60e1d4-6433-477d-89be-6ff9354dd0a4-proxy-tls\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.073930 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovn-node-metrics-cert\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.101393 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7cvw\" (UniqueName: 
\"kubernetes.io/projected/ce60e1d4-6433-477d-89be-6ff9354dd0a4-kube-api-access-s7cvw\") pod \"machine-config-daemon-5tqlx\" (UID: \"ce60e1d4-6433-477d-89be-6ff9354dd0a4\") " pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.105753 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhmnr\" (UniqueName: \"kubernetes.io/projected/2d099499-bf36-4f4f-a556-47f9351394d3-kube-api-access-vhmnr\") pod \"multus-additional-cni-plugins-zqf8t\" (UID: \"2d099499-bf36-4f4f-a556-47f9351394d3\") " pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.115031 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88p9n\" (UniqueName: \"kubernetes.io/projected/8dc35dac-41a2-4bc1-ad26-5f515126921e-kube-api-access-88p9n\") pod \"ovnkube-node-6hbdc\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.119350 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.119695 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dr8hl\" (UniqueName: \"kubernetes.io/projected/3dda8be1-e5bc-42a3-820e-4285b75bf8c2-kube-api-access-dr8hl\") pod \"multus-s4g62\" (UID: \"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\") " pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.217514 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.218063 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.218717 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-s4g62" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.219302 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.251058 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.283030 4669 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.324603 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.359813 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.391716 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-c
rc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.397962 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:48 crc kubenswrapper[4669]: E1210 15:20:48.398105 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.398488 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:48 crc kubenswrapper[4669]: E1210 15:20:48.398559 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.398601 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:48 crc kubenswrapper[4669]: E1210 15:20:48.398675 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.431290 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.475442 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.511103 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.556403 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.571840 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.590108 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.602315 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.616599 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.626642 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38"} Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.628380 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d" exitCode=0 Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.628444 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d"} Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.628470 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"8069245d67d54f793c1dacdded1d83c098ff626276b86e3a9ed3336eac943993"} Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.632176 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9"} Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.632205 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"b4e6d446366347ebbb97ff66630a978db13c04ab51efd77bebc4b3d21a85f69d"} Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.633299 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s4g62" event={"ID":"3dda8be1-e5bc-42a3-820e-4285b75bf8c2","Type":"ContainerStarted","Data":"96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c"} Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.633340 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s4g62" event={"ID":"3dda8be1-e5bc-42a3-820e-4285b75bf8c2","Type":"ContainerStarted","Data":"2dca9e3d23b607befe20eea1824e5ddc36c46a8cb40f6b040df2cd482d1fca49"} Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.634480 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerStarted","Data":"205b76336de9ce14e0fa9fb440ada030f917a4cb39f30578a78fa7938c66b60b"} Dec 10 15:20:48 crc kubenswrapper[4669]: 
I1210 15:20:48.700264 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.724492 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.743828 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.818967 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.896744 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.926362 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.950025 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:48 crc kubenswrapper[4669]: I1210 15:20:48.978234 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:48Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.058150 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroo
t\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.076508 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.076678 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.076783 4669 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.076839 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:53.076823194 +0000 UTC m=+26.993769821 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.077335 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:20:53.077322847 +0000 UTC m=+26.994269484 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.098413 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779
036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.173004 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.177342 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.177430 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.177455 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177561 4669 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177588 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177671 4669 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:53.177653127 +0000 UTC m=+27.094599754 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177677 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177714 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177726 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177759 4669 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177716 4669 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177856 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:53.17779232 +0000 UTC m=+27.094738947 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.177874 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 15:20:53.177867192 +0000 UTC m=+27.094813819 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.246511 4669 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.248332 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.248392 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.248405 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.248583 4669 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.272799 4669 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.273050 4669 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.274422 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.274462 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.274474 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.274495 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.274508 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.300911 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d
584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.339922 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.342632 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.349024 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.349066 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.349076 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.349093 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.349102 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.359780 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.370593 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.378889 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.379086 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.379559 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.379646 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.379703 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.391570 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.402897 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae66
9\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.413148 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.418037 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.418079 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.418091 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.418109 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.418120 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.469390 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.469664 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"0
14cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.476288 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.476325 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.476336 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.476353 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.476363 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.502759 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: E1210 15:20:49.502936 4669 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.504898 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.504921 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.504929 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.504941 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.504951 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.512770 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.551653 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.574581 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.611899 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.611972 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.611983 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.612000 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.612015 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.612670 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.642121 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.642422 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.642440 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.642455 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" 
event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.644270 4669 generic.go:334] "Generic (PLEG): container finished" podID="2d099499-bf36-4f4f-a556-47f9351394d3" containerID="52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b" exitCode=0 Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.644341 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerDied","Data":"52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.647770 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.657298 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.695891 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.714831 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.714863 4669 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.714872 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.714889 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.714900 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.727483 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.757064 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.775557 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.798698 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.821120 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.821182 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.821197 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.821242 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.821281 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.827013 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.847180 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.874614 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.918922 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.930589 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.930629 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.930639 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.930662 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.930680 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:49Z","lastTransitionTime":"2025-12-10T15:20:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:49 crc kubenswrapper[4669]: I1210 15:20:49.947802 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621
b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:49Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.032630 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.032658 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 
15:20:50.032665 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.032678 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.032702 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.085901 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.135034 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.135100 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.135116 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.135142 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.135155 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.151378 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d
584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.184069 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.237268 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.237302 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.237312 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.237359 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.237371 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.298573 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.339870 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.339902 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.339911 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.339926 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.339939 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.400920 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.400990 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:50 crc kubenswrapper[4669]: E1210 15:20:50.401082 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.401110 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:50 crc kubenswrapper[4669]: E1210 15:20:50.401178 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:20:50 crc kubenswrapper[4669]: E1210 15:20:50.401259 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.443963 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.444253 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.444331 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.444439 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.444522 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.547600 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.547636 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.547646 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.547663 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.547675 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.650637 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.651035 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.651124 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.651192 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.651279 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.654940 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.655084 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.656425 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerStarted","Data":"511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.678362 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\"
:{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\
\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.691989 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f
4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.706857 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.731191 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.758253 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.758331 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.758344 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.758366 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.758381 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.776579 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-h76v4"] Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.777325 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.786029 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.786138 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.786355 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.786449 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.788838 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.799293 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/322826d8-4f6d-4ef8-b724-7d7a76490356-host\") pod \"node-ca-h76v4\" (UID: \"322826d8-4f6d-4ef8-b724-7d7a76490356\") " pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.799340 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/322826d8-4f6d-4ef8-b724-7d7a76490356-serviceca\") pod \"node-ca-h76v4\" (UID: \"322826d8-4f6d-4ef8-b724-7d7a76490356\") " pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.799373 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfzjg\" (UniqueName: \"kubernetes.io/projected/322826d8-4f6d-4ef8-b724-7d7a76490356-kube-api-access-vfzjg\") pod \"node-ca-h76v4\" (UID: \"322826d8-4f6d-4ef8-b724-7d7a76490356\") " pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.813751 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.829386 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.839890 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.854427 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.876411 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.876458 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.876468 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.876488 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.876498 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.885883 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621
b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.900167 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.900407 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/322826d8-4f6d-4ef8-b724-7d7a76490356-host\") pod \"node-ca-h76v4\" (UID: \"322826d8-4f6d-4ef8-b724-7d7a76490356\") " pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.900446 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/322826d8-4f6d-4ef8-b724-7d7a76490356-serviceca\") pod \"node-ca-h76v4\" (UID: \"322826d8-4f6d-4ef8-b724-7d7a76490356\") " pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.900469 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfzjg\" (UniqueName: \"kubernetes.io/projected/322826d8-4f6d-4ef8-b724-7d7a76490356-kube-api-access-vfzjg\") pod \"node-ca-h76v4\" (UID: \"322826d8-4f6d-4ef8-b724-7d7a76490356\") " pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.900795 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"host\" (UniqueName: \"kubernetes.io/host-path/322826d8-4f6d-4ef8-b724-7d7a76490356-host\") pod \"node-ca-h76v4\" (UID: \"322826d8-4f6d-4ef8-b724-7d7a76490356\") " pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.902188 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/322826d8-4f6d-4ef8-b724-7d7a76490356-serviceca\") pod \"node-ca-h76v4\" (UID: \"322826d8-4f6d-4ef8-b724-7d7a76490356\") " pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.919185 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfzjg\" (UniqueName: \"kubernetes.io/projected/322826d8-4f6d-4ef8-b724-7d7a76490356-kube-api-access-vfzjg\") pod \"node-ca-h76v4\" (UID: \"322826d8-4f6d-4ef8-b724-7d7a76490356\") " pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.923643 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z 
is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.934727 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.948387 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-rel
ease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.964576 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.979906 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.979934 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.979942 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.979975 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 
15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.979986 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:50Z","lastTransitionTime":"2025-12-10T15:20:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:50 crc kubenswrapper[4669]: I1210 15:20:50.982783 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719
891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:50Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.002312 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.017910 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.031170 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.043364 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.055198 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.066445 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.081370 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.082745 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.082791 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.082805 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.082825 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.082840 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:51Z","lastTransitionTime":"2025-12-10T15:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.098716 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.116673 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.135769 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z 
is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.146278 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.162291 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-rel
ease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\
\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.182637 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.197079 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.197372 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.197457 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.197574 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.197672 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:51Z","lastTransitionTime":"2025-12-10T15:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.229778 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-h76v4" Dec 10 15:20:51 crc kubenswrapper[4669]: W1210 15:20:51.258599 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod322826d8_4f6d_4ef8_b724_7d7a76490356.slice/crio-a4b360254bb130314e73a86e5a652683730adbab53fa7a02ec65e4053cecf30a WatchSource:0}: Error finding container a4b360254bb130314e73a86e5a652683730adbab53fa7a02ec65e4053cecf30a: Status 404 returned error can't find the container with id a4b360254bb130314e73a86e5a652683730adbab53fa7a02ec65e4053cecf30a Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.301559 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.301898 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.301907 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.301932 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.301943 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:51Z","lastTransitionTime":"2025-12-10T15:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.406882 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.406929 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.406939 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.406960 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.406970 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:51Z","lastTransitionTime":"2025-12-10T15:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.514940 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.514999 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.515010 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.515027 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.515039 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:51Z","lastTransitionTime":"2025-12-10T15:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.616944 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.616970 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.616978 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.616992 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.617001 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:51Z","lastTransitionTime":"2025-12-10T15:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.661236 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-h76v4" event={"ID":"322826d8-4f6d-4ef8-b724-7d7a76490356","Type":"ContainerStarted","Data":"40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.661309 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-h76v4" event={"ID":"322826d8-4f6d-4ef8-b724-7d7a76490356","Type":"ContainerStarted","Data":"a4b360254bb130314e73a86e5a652683730adbab53fa7a02ec65e4053cecf30a"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.678173 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/
multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.700431 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etc
d-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.717972 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.722071 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.722106 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.722115 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.722131 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.722145 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:51Z","lastTransitionTime":"2025-12-10T15:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.740722 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d
584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.761666 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.775857 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.801044 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-1
2-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.817138 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.825805 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.825848 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.825860 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.825878 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.825892 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:51Z","lastTransitionTime":"2025-12-10T15:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.833568 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.847360 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.860488 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.917559 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.927703 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.927742 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.927753 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.927770 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.927784 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:51Z","lastTransitionTime":"2025-12-10T15:20:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:51 crc kubenswrapper[4669]: I1210 15:20:51.942861 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.029810 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:51Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.031681 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.031732 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.031742 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.031761 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.031773 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.062661 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.135560 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.136013 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.136089 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.136172 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.136282 4669 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.238558 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.238599 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.238614 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.238635 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.238650 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.342696 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.342751 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.342771 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.342796 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.342818 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.397389 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:52 crc kubenswrapper[4669]: E1210 15:20:52.397621 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.398196 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:52 crc kubenswrapper[4669]: E1210 15:20:52.398317 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.398398 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:52 crc kubenswrapper[4669]: E1210 15:20:52.398471 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.450745 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.450807 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.450821 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.450843 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.450857 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.472822 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.476640 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.491727 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e77903
6cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.503911 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.523079 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z 
is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.537078 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.553492 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.553556 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.553572 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.553597 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.553613 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.556514 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.570957 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-1
2-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.584855 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\
":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.601356 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.616259 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.632481 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.647373 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.657023 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.657083 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.657097 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.657121 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.657134 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.662439 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.668234 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" 
event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.670496 4669 generic.go:334] "Generic (PLEG): container finished" podID="2d099499-bf36-4f4f-a556-47f9351394d3" containerID="511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29" exitCode=0 Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.670599 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerDied","Data":"511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.680610 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.696310 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.716146 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.738849 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.751051 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.766113 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.766164 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.766175 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.766192 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.766202 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.772976 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d
584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.786313 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.802565 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.820340 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d746
2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.834115 4669 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"c
ri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.847515 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.860880 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.869290 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.869330 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.869341 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 
15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.869360 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.869372 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.874806 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.887963 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.901563 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.913953 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.925090 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.940694 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:52Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.971723 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.971772 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.971785 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.971803 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:52 crc kubenswrapper[4669]: I1210 15:20:52.971819 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:52Z","lastTransitionTime":"2025-12-10T15:20:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.074502 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.074553 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.074571 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.074589 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.074601 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:53Z","lastTransitionTime":"2025-12-10T15:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.126240 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.126469 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:21:01.126435203 +0000 UTC m=+35.043381830 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.126538 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.126647 4669 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.126740 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:01.126720711 +0000 UTC m=+35.043667338 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.177809 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.177889 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.177905 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.177933 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.177949 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:53Z","lastTransitionTime":"2025-12-10T15:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.228034 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.228096 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.228123 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228290 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228335 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228350 4669 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228403 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:01.228385922 +0000 UTC m=+35.145332549 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228784 4669 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228825 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-10 15:21:01.228816043 +0000 UTC m=+35.145762670 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228873 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228887 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228895 4669 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:53 crc kubenswrapper[4669]: E1210 15:20:53.228919 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:01.228911306 +0000 UTC m=+35.145857943 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.280714 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.280751 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.280762 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.280791 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.280802 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:53Z","lastTransitionTime":"2025-12-10T15:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.383599 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.383651 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.383665 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.383685 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.383701 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:53Z","lastTransitionTime":"2025-12-10T15:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.486168 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.486209 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.486244 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.486259 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.486271 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:53Z","lastTransitionTime":"2025-12-10T15:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.589295 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.589357 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.589377 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.589398 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.589412 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:53Z","lastTransitionTime":"2025-12-10T15:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.677455 4669 generic.go:334] "Generic (PLEG): container finished" podID="2d099499-bf36-4f4f-a556-47f9351394d3" containerID="644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f" exitCode=0 Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.677508 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerDied","Data":"644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.692654 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.692707 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.692720 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.692740 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.692756 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:53Z","lastTransitionTime":"2025-12-10T15:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.693365 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.715130 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z 
is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.731075 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.751057 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc
/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.771950 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.787332 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb
4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"
name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.802616 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMo
unts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.805859 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.805902 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.805918 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.805943 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.805958 4669 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:53Z","lastTransitionTime":"2025-12-10T15:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.816637 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.828607 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.844431 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.860695 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.876033 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.892205 4669 
status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.905809 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.908907 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.908947 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.908959 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.908977 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.908991 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:53Z","lastTransitionTime":"2025-12-10T15:20:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:53 crc kubenswrapper[4669]: I1210 15:20:53.919915 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:53Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.010922 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.010960 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.010968 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.010983 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.010991 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.113768 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.113814 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.113828 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.113848 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.113862 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.217205 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.217274 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.217299 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.217318 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.217331 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.319748 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.319967 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.319977 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.319997 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.320012 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.397437 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.397490 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:54 crc kubenswrapper[4669]: E1210 15:20:54.397577 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:20:54 crc kubenswrapper[4669]: E1210 15:20:54.397652 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.397882 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:54 crc kubenswrapper[4669]: E1210 15:20:54.398128 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.422740 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.422789 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.422801 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.422818 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.422831 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.525501 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.525560 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.525572 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.525593 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.525604 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.628079 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.628135 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.628154 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.628563 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.628603 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.683974 4669 generic.go:334] "Generic (PLEG): container finished" podID="2d099499-bf36-4f4f-a556-47f9351394d3" containerID="903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244" exitCode=0 Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.684060 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerDied","Data":"903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.690318 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.690543 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.690580 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.705131 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"qu
ay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683
e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.725177 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.730989 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.731005 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.731013 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.731024 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.731033 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.740258 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.754128 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.769736 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.770272 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.770690 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.782966 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.798665 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.811512 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.823731 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.839814 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.839861 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.839872 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.839890 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.839903 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.873858 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.905392 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c687744
1ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.934570 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.943485 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.943527 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.943537 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.943552 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.943564 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:54Z","lastTransitionTime":"2025-12-10T15:20:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.961191 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d
584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.974298 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:54 crc kubenswrapper[4669]: I1210 15:20:54.991079 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:54Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.007345 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.043891 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.045564 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.045596 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.045603 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.045618 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.045627 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:55Z","lastTransitionTime":"2025-12-10T15:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.076205 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.089114 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.098525 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.114096 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.137847 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.147917 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.147951 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.147964 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.147982 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.147994 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:55Z","lastTransitionTime":"2025-12-10T15:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.151141 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.167499 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.179277 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.192465 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999
208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.204969 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.220409 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.234800 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.247975 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.250777 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.250809 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.250825 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.250841 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 
10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.250852 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:55Z","lastTransitionTime":"2025-12-10T15:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.353085 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.353424 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.353437 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.353453 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.353461 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:55Z","lastTransitionTime":"2025-12-10T15:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.471730 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.471766 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.471775 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.471792 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.471802 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:55Z","lastTransitionTime":"2025-12-10T15:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.574250 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.574301 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.574349 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.574371 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.574383 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:55Z","lastTransitionTime":"2025-12-10T15:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.678648 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.678715 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.678733 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.678759 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.678779 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:55Z","lastTransitionTime":"2025-12-10T15:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
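[editor's note] Two distinct failures repeat through this window: every pod status patch is rejected because the "pod.network-node-identity.openshift.io" webhook at 127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, and the node keeps reporting NotReady because /etc/kubernetes/cni/net.d/ contains no CNI configuration. The standalone Go sketch below is not part of the kubelet; the address and directory are copied verbatim from the entries above, and the *.conf/*.conflist/*.json patterns are an assumption based on common CNI convention. It reproduces both checks when run on the node.

// Standalone diagnostic sketch (not kubelet code): reproduces the two checks
// failing in the surrounding log. The webhook address 127.0.0.1:9743 and the
// directory /etc/kubernetes/cni/net.d come from the log lines; the config-file
// patterns are an assumption, not taken from the kubelet source.
package main

import (
	"crypto/tls"
	"fmt"
	"os"
	"path/filepath"
	"time"
)

func main() {
	// Check 1: the kubelet reports NetworkReady=false while no CNI config
	// file exists in its conf dir, which is exactly the NodeNotReady message.
	confDir := "/etc/kubernetes/cni/net.d"
	var confs []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} { // assumed patterns
		matches, err := filepath.Glob(filepath.Join(confDir, pat))
		if err == nil {
			confs = append(confs, matches...)
		}
	}
	if len(confs) == 0 {
		fmt.Printf("no CNI configuration file in %s/. Has your network provider started?\n", confDir)
	} else {
		fmt.Printf("CNI configurations present: %v\n", confs)
	}

	// Check 2: fetch the webhook's serving certificate without verifying it,
	// then compare validity dates; this mirrors the x509 error attached to
	// every failed status patch above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Fprintln(os.Stderr, "dial webhook:", err)
		return
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("webhook cert: NotBefore=%s NotAfter=%s now=%s expired=%t\n",
		cert.NotBefore.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339),
		now.Format(time.RFC3339), now.After(cert.NotAfter))
}

If the second check prints expired=true with NotAfter=2025-08-24T17:21:41Z, it matches the "certificate has expired or is not yet valid" failure recorded on every status patch in this window.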
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.697904 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.698795 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerStarted","Data":"0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25"}
Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.715538 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus
/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.736781 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\
\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\
":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.754227 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.781186 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.781293 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.781308 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.781334 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.781352 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:55Z","lastTransitionTime":"2025-12-10T15:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.791811 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.807160 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.826440 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999
208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.842338 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.919917 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.919970 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.919982 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.920004 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 
15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.920016 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:55Z","lastTransitionTime":"2025-12-10T15:20:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.925113 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.951559 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.962512 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:55 crc kubenswrapper[4669]: I1210 15:20:55.988107 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:55Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.011756 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.024644 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.034932 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.043844 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.061627 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.061654 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.061663 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.061677 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.061686 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:56Z","lastTransitionTime":"2025-12-10T15:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.248020 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.248068 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.248080 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.248101 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.248117 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:56Z","lastTransitionTime":"2025-12-10T15:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.354055 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.354097 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.354108 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.354127 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.354143 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:56Z","lastTransitionTime":"2025-12-10T15:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.397081 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:56 crc kubenswrapper[4669]: E1210 15:20:56.397277 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.397848 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:56 crc kubenswrapper[4669]: E1210 15:20:56.397911 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.397958 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:56 crc kubenswrapper[4669]: E1210 15:20:56.398004 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.419796 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserv
er-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":
\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.434647 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b86
1715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.452078 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.463694 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.463785 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.463802 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.463841 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.463866 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:56Z","lastTransitionTime":"2025-12-10T15:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.472386 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.487328 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.503103 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.519479 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.529708 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.547337 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.560339 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cn
i/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.568941 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.568976 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.568988 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.569004 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.569015 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:56Z","lastTransitionTime":"2025-12-10T15:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.571107 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.593642 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.612145 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.628321 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999
208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.648422 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:56Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.671437 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.671497 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.671510 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.671527 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.671563 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:56Z","lastTransitionTime":"2025-12-10T15:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.700553 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.775083 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.775119 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.775128 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.775146 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.775157 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:56Z","lastTransitionTime":"2025-12-10T15:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.878521 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.878564 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.878574 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.878591 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.878630 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:56Z","lastTransitionTime":"2025-12-10T15:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.980866 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.980898 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.980907 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.980924 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:56 crc kubenswrapper[4669]: I1210 15:20:56.980935 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:56Z","lastTransitionTime":"2025-12-10T15:20:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.083736 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.083792 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.083803 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.083816 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.083825 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:57Z","lastTransitionTime":"2025-12-10T15:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.185964 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.185994 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.186003 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.186016 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.186026 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:57Z","lastTransitionTime":"2025-12-10T15:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.288508 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.288580 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.288603 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.288632 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.288655 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:57Z","lastTransitionTime":"2025-12-10T15:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.391290 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.391437 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.391456 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.392090 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.392140 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:57Z","lastTransitionTime":"2025-12-10T15:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.495398 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.495443 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.495458 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.495480 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.495495 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:57Z","lastTransitionTime":"2025-12-10T15:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.601773 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.601815 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.601836 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.601854 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.601869 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:57Z","lastTransitionTime":"2025-12-10T15:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.703424 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.703471 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.703482 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.703496 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.703506 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:57Z","lastTransitionTime":"2025-12-10T15:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.806867 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.806928 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.806958 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.806982 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.807001 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:57Z","lastTransitionTime":"2025-12-10T15:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.910807 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.910914 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.910933 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.910958 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:57 crc kubenswrapper[4669]: I1210 15:20:57.910982 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:57Z","lastTransitionTime":"2025-12-10T15:20:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.014349 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.014418 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.014438 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.014469 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.014489 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.117650 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.117702 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.117715 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.117736 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.117755 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.221903 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.221949 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.221965 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.221989 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.222006 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.325724 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.325770 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.325791 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.325810 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.325822 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.397885 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:20:58 crc kubenswrapper[4669]: E1210 15:20:58.398145 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.399191 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.399335 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:20:58 crc kubenswrapper[4669]: E1210 15:20:58.399494 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:20:58 crc kubenswrapper[4669]: E1210 15:20:58.399635 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.429383 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.429424 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.429436 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.429453 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.429463 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.531311 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.531348 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.531360 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.531376 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.531388 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.633575 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.633611 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.633647 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.633669 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.633681 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.709153 4669 generic.go:334] "Generic (PLEG): container finished" podID="2d099499-bf36-4f4f-a556-47f9351394d3" containerID="0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25" exitCode=0 Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.709238 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerDied","Data":"0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.732356 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.736780 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.736818 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.736831 4669 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.736853 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.736866 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.746841 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.757723 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.772911 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.785569 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.800606 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.834997 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b32025
0a8ddd2f004ac9e47cd9f268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.841093 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.841169 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.841185 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.841241 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.841258 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.847456 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.864905 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":
true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\
"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.896053 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.910732 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.923445 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.939601 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.945577 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.945622 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.945632 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.945664 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.945687 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:58Z","lastTransitionTime":"2025-12-10T15:20:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.955132 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:58 crc kubenswrapper[4669]: I1210 15:20:58.975199 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:58Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.047799 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.047846 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.047859 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.047876 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 
15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.047891 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.150374 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.150425 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.150441 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.150462 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.150477 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.253391 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.253429 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.253439 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.253452 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.253462 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.356764 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.356807 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.356816 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.356833 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.356856 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.460287 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.460345 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.460356 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.460380 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.460392 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.563613 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.563675 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.563686 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.563708 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.563721 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.666385 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.666463 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.666484 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.666513 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.666537 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.719071 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerDied","Data":"3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58"}
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.719096 4669 generic.go:334] "Generic (PLEG): container finished" podID="2d099499-bf36-4f4f-a556-47f9351394d3" containerID="3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58" exitCode=0
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.725835 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/0.log"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.730784 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268" exitCode=1
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.730852 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268"}
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.732303 4669 scope.go:117] "RemoveContainer" containerID="1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.739393 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.759397 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.769990 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.770032 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.770041 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.770062 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.770074 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.777626 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.777660 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.777670 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.777685 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.777695 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.780203 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z"
Dec 10 15:20:59 crc kubenswrapper[4669]: E1210 15:20:59.791996 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.794013 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.796191 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.796258 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.796272 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.796296 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.796309 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.809007 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z"
Dec 10 15:20:59 crc kubenswrapper[4669]: E1210 15:20:59.811273 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.818287 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.818344 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.818366 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.818391 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.818409 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.828062 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z"
Dec 10 15:20:59 crc kubenswrapper[4669]: E1210 15:20:59.834539 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.838166 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.838199 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.838231 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.838252 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.838264 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.848411 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: E1210 15:20:59.850789 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.855127 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.855183 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.855200 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.855243 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.855259 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.868449 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: E1210 15:20:59.876933 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: E1210 15:20:59.877057 4669 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.878693 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.878746 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.878821 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.878842 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.878857 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.893718 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b32025
0a8ddd2f004ac9e47cd9f268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.908311 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.924044 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a168
8df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"
/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.937631 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.953631 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.981873 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.981918 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.981989 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.982002 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.982030 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.982044 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:20:59Z","lastTransitionTime":"2025-12-10T15:20:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:20:59 crc kubenswrapper[4669]: I1210 15:20:59.993722 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\
\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:20:59Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.010589 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.026833 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.042395 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.057428 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.071411 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.083777 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.086038 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.086073 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.086084 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.086103 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.086115 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:00Z","lastTransitionTime":"2025-12-10T15:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.100955 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\" 5788 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1210 15:20:59.406677 5788 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1210 15:20:59.406699 5788 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1210 15:20:59.406718 5788 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1210 15:20:59.406740 5788 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 15:20:59.406753 5788 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 15:20:59.406758 5788 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 15:20:59.406809 5788 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 15:20:59.406840 5788 factory.go:656] Stopping watch factory\\\\nI1210 15:20:59.406863 5788 ovnkube.go:599] Stopped ovnkube\\\\nI1210 15:20:59.406892 5788 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 15:20:59.406915 5788 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 15:20:59.406923 5788 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 15:20:59.406931 5788 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 15:20:59.406940 5788 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 15:20:59.406947 5788 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.112339 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.126369 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.147441 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.160844 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.174857 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.188866 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.188912 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.188931 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.188952 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.188967 4669 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:00Z","lastTransitionTime":"2025-12-10T15:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.191778 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.204147 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.221171 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:00Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.290637 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.290700 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.290715 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.290736 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 
15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.290754 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:00Z","lastTransitionTime":"2025-12-10T15:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.394003 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.394054 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.394063 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.394082 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.394092 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:00Z","lastTransitionTime":"2025-12-10T15:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.397478 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.397562 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.397590 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:00 crc kubenswrapper[4669]: E1210 15:21:00.397672 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:00 crc kubenswrapper[4669]: E1210 15:21:00.397800 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:00 crc kubenswrapper[4669]: E1210 15:21:00.397987 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.497627 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.497669 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.497678 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.497698 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.497710 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:00Z","lastTransitionTime":"2025-12-10T15:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.600572 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.600612 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.600623 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.600639 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.600651 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:00Z","lastTransitionTime":"2025-12-10T15:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.704628 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.704719 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.704748 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.704780 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.704807 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:00Z","lastTransitionTime":"2025-12-10T15:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.807670 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.807736 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.807754 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.807780 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.807799 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:00Z","lastTransitionTime":"2025-12-10T15:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.914275 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.914327 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.914339 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.914372 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:00 crc kubenswrapper[4669]: I1210 15:21:00.914384 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:00Z","lastTransitionTime":"2025-12-10T15:21:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.018617 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.018675 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.018699 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.018726 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.018747 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.122847 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.122923 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.122949 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.122980 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.123001 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.173920 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.174080 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.174258 4669 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.174377 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:17.17435121 +0000 UTC m=+51.091297847 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.174650 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:21:17.174598587 +0000 UTC m=+51.091545214 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.226444 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.226696 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.226836 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.226930 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.227032 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.275805 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.276104 4669 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.276319 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:17.276286119 +0000 UTC m=+51.193232936 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.276432 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.276475 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.276491 4669 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.276572 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:17.276549705 +0000 UTC m=+51.193496342 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.276798 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.277021 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.278411 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.278442 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.278456 4669 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod 
openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:01 crc kubenswrapper[4669]: E1210 15:21:01.278528 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:17.278516353 +0000 UTC m=+51.195462970 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.330158 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.330236 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.330250 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.330271 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.330285 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.433879 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.433949 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.433969 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.433995 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.434013 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.537420 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.537467 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.537477 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.537492 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.537501 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.543164 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl"] Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.543800 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.545702 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.545894 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.569770 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.610112 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.630342 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.640294 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.640345 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.640360 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.640381 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.640399 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.646311 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\
\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.687801 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.695355 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/82f00eda-0389-4ff1-ae1c-ce2790df3a44-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.695424 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/82f00eda-0389-4ff1-ae1c-ce2790df3a44-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.695478 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/82f00eda-0389-4ff1-ae1c-ce2790df3a44-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") 
" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.695515 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmn42\" (UniqueName: \"kubernetes.io/projected/82f00eda-0389-4ff1-ae1c-ce2790df3a44-kube-api-access-wmn42\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.710037 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.730019 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.740722 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/0.log" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.741902 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.741933 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.741942 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.741958 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.741968 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.743976 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.744107 4669 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.747037 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" event={"ID":"2d099499-bf36-4f4f-a556-47f9351394d3","Type":"ContainerStarted","Data":"08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.751642 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\
\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.765404 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.774796 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.786324 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.796465 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/82f00eda-0389-4ff1-ae1c-ce2790df3a44-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.796819 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/82f00eda-0389-4ff1-ae1c-ce2790df3a44-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.796842 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmn42\" (UniqueName: \"kubernetes.io/projected/82f00eda-0389-4ff1-ae1c-ce2790df3a44-kube-api-access-wmn42\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.796874 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/82f00eda-0389-4ff1-ae1c-ce2790df3a44-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.797500 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/82f00eda-0389-4ff1-ae1c-ce2790df3a44-env-overrides\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.797887 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/82f00eda-0389-4ff1-ae1c-ce2790df3a44-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.803818 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/82f00eda-0389-4ff1-ae1c-ce2790df3a44-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.809068 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.821321 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.822364 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmn42\" (UniqueName: \"kubernetes.io/projected/82f00eda-0389-4ff1-ae1c-ce2790df3a44-kube-api-access-wmn42\") pod \"ovnkube-control-plane-749d76644c-jq6vl\" (UID: \"82f00eda-0389-4ff1-ae1c-ce2790df3a44\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl"
Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.843398 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b32025
0a8ddd2f004ac9e47cd9f268\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\" 5788 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1210 15:20:59.406677 5788 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1210 15:20:59.406699 5788 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1210 15:20:59.406718 5788 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1210 15:20:59.406740 5788 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 15:20:59.406753 5788 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 15:20:59.406758 5788 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 15:20:59.406809 5788 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 15:20:59.406840 5788 factory.go:656] Stopping watch factory\\\\nI1210 15:20:59.406863 5788 ovnkube.go:599] Stopped ovnkube\\\\nI1210 15:20:59.406892 5788 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 15:20:59.406915 5788 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 15:20:59.406923 5788 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 15:20:59.406931 5788 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 15:20:59.406940 5788 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 15:20:59.406947 5788 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.844752 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.844789 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.844799 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.844816 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.844830 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.865381 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.875558 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.892983 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finish
edAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.910544 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP
\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.948724 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.948753 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.948762 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.948775 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.948784 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:01Z","lastTransitionTime":"2025-12-10T15:21:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.950401 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.974537 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:01 crc kubenswrapper[4669]: I1210 15:21:01.997987 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc83
6b8276409d0a4470414daed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\" 5788 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1210 15:20:59.406677 5788 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1210 15:20:59.406699 5788 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1210 15:20:59.406718 5788 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1210 15:20:59.406740 5788 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 15:20:59.406753 5788 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 15:20:59.406758 5788 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 15:20:59.406809 5788 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 15:20:59.406840 5788 factory.go:656] Stopping watch factory\\\\nI1210 15:20:59.406863 5788 ovnkube.go:599] Stopped ovnkube\\\\nI1210 15:20:59.406892 5788 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 15:20:59.406915 5788 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 15:20:59.406923 5788 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 15:20:59.406931 5788 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 15:20:59.406940 5788 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 15:20:59.406947 5788 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:01Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.010174 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.025717 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.041270 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.054090 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.054148 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.054161 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.054179 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 
15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.054191 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.058057 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.074600 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.088832 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.104185 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.119100 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.132635 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.147616 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.156887 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.156973 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.157002 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.157027 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.157038 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.162853 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.174617 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.260653 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.260951 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.261043 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.261139 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.261250 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.364267 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.364316 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.364332 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.364356 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.364377 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.397913 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:02 crc kubenswrapper[4669]: E1210 15:21:02.398093 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.398311 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:02 crc kubenswrapper[4669]: E1210 15:21:02.398375 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.398401 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:02 crc kubenswrapper[4669]: E1210 15:21:02.398441 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.467713 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.467784 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.467805 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.467834 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.467857 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.571066 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.571120 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.571133 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.571157 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.571171 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.674092 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.674142 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.674155 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.674176 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.674190 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.714048 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-rz9mm"] Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.714846 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:02 crc kubenswrapper[4669]: E1210 15:21:02.714927 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.743291 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99e
b135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.752522 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/1.log" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.753256 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/0.log" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.757153 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0" exitCode=1 Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.757179 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.757340 4669 scope.go:117] "RemoveContainer" containerID="1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.758271 4669 scope.go:117] "RemoveContainer" containerID="1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0" Dec 10 15:21:02 crc kubenswrapper[4669]: E1210 15:21:02.758498 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\"" 
pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.761112 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" event={"ID":"82f00eda-0389-4ff1-ae1c-ce2790df3a44","Type":"ContainerStarted","Data":"bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.761187 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" event={"ID":"82f00eda-0389-4ff1-ae1c-ce2790df3a44","Type":"ContainerStarted","Data":"1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.761204 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" event={"ID":"82f00eda-0389-4ff1-ae1c-ce2790df3a44","Type":"ContainerStarted","Data":"98e611196a1c9633dd224f2d172b949c1d479fb7c87f5cd1e24fba772c17784c"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.768292 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.776776 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.776817 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.776837 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.776856 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.776867 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.798188 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\" 5788 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1210 15:20:59.406677 5788 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1210 15:20:59.406699 5788 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1210 15:20:59.406718 5788 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1210 15:20:59.406740 5788 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 15:20:59.406753 5788 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 15:20:59.406758 5788 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 15:20:59.406809 5788 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 15:20:59.406840 5788 factory.go:656] Stopping watch factory\\\\nI1210 15:20:59.406863 5788 ovnkube.go:599] Stopped ovnkube\\\\nI1210 15:20:59.406892 5788 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 15:20:59.406915 5788 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 15:20:59.406923 5788 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 15:20:59.406931 5788 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 15:20:59.406940 5788 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 15:20:59.406947 5788 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.808351 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pvdj\" (UniqueName: \"kubernetes.io/projected/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-kube-api-access-4pvdj\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.808397 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.810852 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.825360 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.843002 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.856975 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.871133 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.878885 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.878922 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.878963 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.878978 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.878987 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.883488 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\
\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.898902 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.909872 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pvdj\" (UniqueName: \"kubernetes.io/projected/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-kube-api-access-4pvdj\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.909960 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:02 crc kubenswrapper[4669]: E1210 15:21:02.910551 4669 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:02 crc kubenswrapper[4669]: E1210 15:21:02.910628 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs podName:a72cfbf6-e882-4e1e-8809-b6735aae5dfe nodeName:}" failed. 
No retries permitted until 2025-12-10 15:21:03.410607265 +0000 UTC m=+37.327553902 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs") pod "network-metrics-daemon-rz9mm" (UID: "a72cfbf6-e882-4e1e-8809-b6735aae5dfe") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.912617 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.927412 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.930988 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pvdj\" (UniqueName: \"kubernetes.io/projected/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-kube-api-access-4pvdj\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.947240 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.960150 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.973646 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.981654 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.981705 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.981718 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.981735 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.981746 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:02Z","lastTransitionTime":"2025-12-10T15:21:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:02 crc kubenswrapper[4669]: I1210 15:21:02.999586 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.014664 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.026563 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.038758 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.051268 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.063438 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.076682 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.084407 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.084470 4669 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.084482 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.084506 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.084526 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:03Z","lastTransitionTime":"2025-12-10T15:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.095879 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-1
2-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.113091 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.127837 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.139233 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.170485 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.188012 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.188099 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.188115 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.188137 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.188150 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:03Z","lastTransitionTime":"2025-12-10T15:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.188455 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.206962 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.235821 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9e
b517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\" 5788 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1210 15:20:59.406677 5788 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1210 15:20:59.406699 5788 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1210 15:20:59.406718 5788 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1210 15:20:59.406740 5788 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 15:20:59.406753 5788 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 15:20:59.406758 5788 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 15:20:59.406809 5788 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 15:20:59.406840 5788 factory.go:656] Stopping watch factory\\\\nI1210 15:20:59.406863 5788 ovnkube.go:599] Stopped ovnkube\\\\nI1210 15:20:59.406892 5788 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 15:20:59.406915 5788 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 15:20:59.406923 5788 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 15:20:59.406931 5788 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 15:20:59.406940 5788 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 15:20:59.406947 5788 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"t network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z]\\\\nI1210 15:21:02.345359 5984 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1210 15:21:02.347073 5984 services_controller.go:451] Built service openshift-machine-api/machine-api-controllers cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-controllers_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/mach\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.249891 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-ap
i-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.271630 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.291519 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.291551 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:03 crc 
kubenswrapper[4669]: I1210 15:21:03.291563 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.291580 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.291591 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:03Z","lastTransitionTime":"2025-12-10T15:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.296866 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.312494 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:03Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.394062 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.394114 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.394130 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.394154 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.394169 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:03Z","lastTransitionTime":"2025-12-10T15:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.415765 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:03 crc kubenswrapper[4669]: E1210 15:21:03.415997 4669 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:03 crc kubenswrapper[4669]: E1210 15:21:03.416103 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs podName:a72cfbf6-e882-4e1e-8809-b6735aae5dfe nodeName:}" failed. No retries permitted until 2025-12-10 15:21:04.416072938 +0000 UTC m=+38.333019775 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs") pod "network-metrics-daemon-rz9mm" (UID: "a72cfbf6-e882-4e1e-8809-b6735aae5dfe") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.497488 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.497531 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.497542 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.497560 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.497574 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:03Z","lastTransitionTime":"2025-12-10T15:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.600520 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.600562 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.600572 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.600589 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.600599 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:03Z","lastTransitionTime":"2025-12-10T15:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.703489 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.703525 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.703533 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.703548 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.703558 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:03Z","lastTransitionTime":"2025-12-10T15:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.767104 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/1.log" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.806585 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.806647 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.806662 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.807060 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.807098 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:03Z","lastTransitionTime":"2025-12-10T15:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.916496 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.916542 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.916552 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.916571 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:03 crc kubenswrapper[4669]: I1210 15:21:03.916584 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:03Z","lastTransitionTime":"2025-12-10T15:21:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.019451 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.019503 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.019513 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.019530 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.019541 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.122183 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.122248 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.122262 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.122304 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.122313 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.225297 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.225337 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.225346 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.225360 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.225369 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.328613 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.328666 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.328684 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.328710 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.328730 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.397650 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.397745 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.397675 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.397674 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:04 crc kubenswrapper[4669]: E1210 15:21:04.397920 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:04 crc kubenswrapper[4669]: E1210 15:21:04.398050 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:04 crc kubenswrapper[4669]: E1210 15:21:04.398187 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:04 crc kubenswrapper[4669]: E1210 15:21:04.398339 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.427009 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:04 crc kubenswrapper[4669]: E1210 15:21:04.427287 4669 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:04 crc kubenswrapper[4669]: E1210 15:21:04.427421 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs podName:a72cfbf6-e882-4e1e-8809-b6735aae5dfe nodeName:}" failed. No retries permitted until 2025-12-10 15:21:06.427379693 +0000 UTC m=+40.344326330 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs") pod "network-metrics-daemon-rz9mm" (UID: "a72cfbf6-e882-4e1e-8809-b6735aae5dfe") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.432189 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.432265 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.432285 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.432307 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.432353 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.536661 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.536741 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.536782 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.536805 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.536819 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.639767 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.639836 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.639859 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.639887 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.639909 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.744000 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.744089 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.744110 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.744134 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.744182 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.847494 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.847863 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.847876 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.847896 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.847907 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.950664 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.950709 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.950723 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.950740 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:04 crc kubenswrapper[4669]: I1210 15:21:04.950752 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:04Z","lastTransitionTime":"2025-12-10T15:21:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.053971 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.055097 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.055338 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.055502 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.055628 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.159570 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.159631 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.159652 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.159677 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.159695 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.263188 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.263341 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.263361 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.263386 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.263405 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.366466 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.366553 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.366572 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.366595 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.366615 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.469018 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.469059 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.469067 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.469083 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.469092 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.571487 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.571529 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.571540 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.571555 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.571565 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.674955 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.675010 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.675027 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.675058 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.675077 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.778703 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.778768 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.778780 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.778801 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.778815 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.882058 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.882118 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.882132 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.882154 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.882168 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.985753 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.985824 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.985845 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.985871 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:05 crc kubenswrapper[4669]: I1210 15:21:05.985889 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:05Z","lastTransitionTime":"2025-12-10T15:21:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.089659 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.089705 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.089722 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.089745 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.089766 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:06Z","lastTransitionTime":"2025-12-10T15:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.192388 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.192445 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.192463 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.192487 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.192506 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:06Z","lastTransitionTime":"2025-12-10T15:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.295410 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.295449 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.295457 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.295475 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.295484 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:06Z","lastTransitionTime":"2025-12-10T15:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.397153 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.397372 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.397378 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.397458 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:06 crc kubenswrapper[4669]: E1210 15:21:06.397567 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:06 crc kubenswrapper[4669]: E1210 15:21:06.397783 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:06 crc kubenswrapper[4669]: E1210 15:21:06.397831 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:06 crc kubenswrapper[4669]: E1210 15:21:06.397927 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.398427 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.398465 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.398476 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.398493 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.398506 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:06Z","lastTransitionTime":"2025-12-10T15:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.412461 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.426926 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.441636 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.454266 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:06 crc kubenswrapper[4669]: E1210 15:21:06.454399 4669 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:06 crc kubenswrapper[4669]: E1210 15:21:06.454448 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs podName:a72cfbf6-e882-4e1e-8809-b6735aae5dfe nodeName:}" failed. No retries permitted until 2025-12-10 15:21:10.454436097 +0000 UTC m=+44.371382724 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs") pod "network-metrics-daemon-rz9mm" (UID: "a72cfbf6-e882-4e1e-8809-b6735aae5dfe") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.456011 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.468061 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.479524 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.495032 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.501562 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.501622 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.501643 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.501666 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.501682 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:06Z","lastTransitionTime":"2025-12-10T15:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.508637 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15
:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.521079 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.546276 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.567769 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.586081 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc
/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.601080 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.604123 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.604170 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.604187 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.604208 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.604262 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:06Z","lastTransitionTime":"2025-12-10T15:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.618602 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.650365 4669 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"container
ID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a5
13a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.664931 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.683172 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc83
6b8276409d0a4470414daed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c7dceaa3ec32d900208e7606e2d89fc87b320250a8ddd2f004ac9e47cd9f268\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"message\\\":\\\" 5788 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1210 15:20:59.406677 5788 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1210 15:20:59.406699 5788 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1210 15:20:59.406718 5788 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1210 15:20:59.406740 5788 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1210 15:20:59.406753 5788 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1210 15:20:59.406758 5788 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1210 15:20:59.406809 5788 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1210 15:20:59.406840 5788 factory.go:656] Stopping watch factory\\\\nI1210 15:20:59.406863 5788 ovnkube.go:599] Stopped ovnkube\\\\nI1210 15:20:59.406892 5788 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 15:20:59.406915 5788 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1210 15:20:59.406923 5788 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1210 15:20:59.406931 5788 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1210 15:20:59.406940 5788 handler.go:208] Removed *v1.Node event handler 2\\\\nI1210 15:20:59.406947 5788 handler.go:208] Removed *v1.Node event handler 7\\\\nI1210 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"t network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z]\\\\nI1210 15:21:02.345359 5984 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1210 15:21:02.347073 5984 services_controller.go:451] Built service openshift-machine-api/machine-api-controllers cluster-wide LB for network=default: 
[]services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-controllers_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/mach\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"
}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:06Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.706888 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.706936 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.706945 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.706959 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.706970 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:06Z","lastTransitionTime":"2025-12-10T15:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.809437 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.809472 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.809482 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.809497 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.809507 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:06Z","lastTransitionTime":"2025-12-10T15:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.913458 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.913494 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.913505 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.913522 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:06 crc kubenswrapper[4669]: I1210 15:21:06.913536 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:06Z","lastTransitionTime":"2025-12-10T15:21:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.019498 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.019548 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.019565 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.019589 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.019604 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.123527 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.123590 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.123611 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.123636 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.123654 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.226380 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.227357 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.227406 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.227426 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.227438 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.330473 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.330519 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.330529 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.330551 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.330577 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.434280 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.434332 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.434346 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.434370 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.434384 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.536842 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.536899 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.536919 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.536941 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.536958 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.639610 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.639857 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.639968 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.640056 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.640135 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.744489 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.744643 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.744670 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.744742 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.744770 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.847999 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.848045 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.848055 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.848071 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.848081 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.952012 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.952113 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.952131 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.952182 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:07 crc kubenswrapper[4669]: I1210 15:21:07.952200 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:07Z","lastTransitionTime":"2025-12-10T15:21:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.055973 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.056029 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.056059 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.056078 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.056091 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:08Z","lastTransitionTime":"2025-12-10T15:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.158726 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.158805 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.158827 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.158861 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.158884 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:08Z","lastTransitionTime":"2025-12-10T15:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.296165 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.296207 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.296243 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.296258 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.296269 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:08Z","lastTransitionTime":"2025-12-10T15:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.397051 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.397057 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.397088 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.397335 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:08 crc kubenswrapper[4669]: E1210 15:21:08.397459 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:08 crc kubenswrapper[4669]: E1210 15:21:08.397568 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:08 crc kubenswrapper[4669]: E1210 15:21:08.397708 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:08 crc kubenswrapper[4669]: E1210 15:21:08.397856 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.404033 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.404098 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.404140 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.404170 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.404196 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:08Z","lastTransitionTime":"2025-12-10T15:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.507852 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.507931 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.507954 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.507983 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.508003 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:08Z","lastTransitionTime":"2025-12-10T15:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.611077 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.611115 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.611125 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.611144 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.611155 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:08Z","lastTransitionTime":"2025-12-10T15:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.714344 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.714408 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.714421 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.714442 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.714456 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:08Z","lastTransitionTime":"2025-12-10T15:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.817371 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.817437 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.817449 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.817472 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.817486 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:08Z","lastTransitionTime":"2025-12-10T15:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.920860 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.920922 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.920931 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.920952 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:08 crc kubenswrapper[4669]: I1210 15:21:08.920966 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:08Z","lastTransitionTime":"2025-12-10T15:21:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.024692 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.024757 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.024770 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.024791 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.024805 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.127057 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.127115 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.127128 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.127147 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.127159 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.230525 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.230599 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.230613 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.230646 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.230706 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.336426 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.336504 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.336526 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.336555 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.336579 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.439834 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.439903 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.439926 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.439957 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.439979 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.542825 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.542861 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.542872 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.542885 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.542894 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.645339 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.645468 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.645515 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.645536 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.645553 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.748904 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.748971 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.748994 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.749021 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.749042 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.851917 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.851992 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.852016 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.852045 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.852065 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.955010 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.955045 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.955056 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.955072 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:09 crc kubenswrapper[4669]: I1210 15:21:09.955083 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:09Z","lastTransitionTime":"2025-12-10T15:21:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.057609 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.057642 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.057651 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.057666 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.057676 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.160519 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.160567 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.160605 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.160624 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.160635 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.171434 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.171467 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.171478 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.171500 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.171521 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.193140 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:10Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.198661 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.198716 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.198737 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.198763 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.198781 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.220199 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:10Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.224956 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.225028 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.225046 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.225070 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.225091 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.245288 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:10Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.250700 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.250742 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.250753 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.250769 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.250781 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.270500 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:10Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:10Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.275732 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.275810 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.275836 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.275865 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.275889 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.300661 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status [node status patch payload omitted; identical to the previous attempt above] for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:10Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.300827 4669 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.303497 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
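Note: every node-status patch in this window fails for the same reason: the node.network-node-identity webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-12-10. The failing check is ordinary x509 validity, exactly what Go's TLS stack enforces. A minimal sketch of that check, assuming the webhook certificate has been copied to a local PEM file (the path below is a hypothetical placeholder, not the real location on the node):

// certcheck.go - minimal sketch: report whether a PEM-encoded certificate
// is valid at the current time, mirroring the "certificate has expired or
// is not yet valid" x509 error in the log above.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	data, err := os.ReadFile("/tmp/webhook-cert.pem") // placeholder path, not the node's real cert location
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		fmt.Fprintln(os.Stderr, "no PEM block found")
		os.Exit(1)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	now := time.Now()
	fmt.Printf("NotBefore=%s NotAfter=%s now=%s\n", cert.NotBefore, cert.NotAfter, now)
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		// same condition the TLS handshake in the log is rejecting
		fmt.Println("certificate is expired or not yet valid")
	}
}

Until that certificate is rotated (or the webhook no longer intercepts node patches), every status update will fail the same way and the kubelet will keep exhausting its retry budget, as the "update node status exceeds retry count" line above shows.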
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.303536 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.303555 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.303581 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.303599 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.397932 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.398018 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.398170 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.398263 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.398265 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.398376 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.398614 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.398786 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.407187 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.407380 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.407401 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.407498 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.407601 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.510677 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.511099 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.511313 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.511515 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.511720 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
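Note: every NodeNotReady heartbeat and every skipped pod sync in this window carries the same root message: no CNI configuration file in /etc/kubernetes/cni/net.d/. Conceptually, the runtime reports NetworkReady=true only once a usable CNI config file appears in that directory. A minimal sketch of that idea under stated assumptions (this is an illustration of the readiness condition, not the actual kubelet/CRI-O code):

// cnicheck.go - minimal sketch: the network plugin counts as "ready" only
// once a CNI config file shows up in the conf directory named in the log.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log message
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("NetworkReady=false: %v\n", err)
		return
	}
	for _, e := range entries {
		// extensions commonly accepted by CNI config loaders (assumption)
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Printf("NetworkReady=true: found %s\n", e.Name())
			return
		}
	}
	fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
}

The four pods skipped above (network-metrics-daemon-rz9mm, network-check-source, network-check-target, networking-console-plugin) all require pod networking, so their sandboxes cannot be created until this condition clears.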
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.519618 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.519862 4669 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 10 15:21:10 crc kubenswrapper[4669]: E1210 15:21:10.519968 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs podName:a72cfbf6-e882-4e1e-8809-b6735aae5dfe nodeName:}" failed. No retries permitted until 2025-12-10 15:21:18.519934803 +0000 UTC m=+52.436881470 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs") pod "network-metrics-daemon-rz9mm" (UID: "a72cfbf6-e882-4e1e-8809-b6735aae5dfe") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.615555 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.615637 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.615665 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.615698 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.615725 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
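Note: the mount failure above is rescheduled for 15:21:18, eight seconds out (durationBeforeRetry 8s): while the metrics-daemon-secret object is unregistered, the volume manager backs off between attempts rather than retrying in a tight loop. A minimal sketch of such a doubling backoff schedule, with illustrative base and cap values (assumptions, not kubelet's exact constants from nestedpendingoperations):

// backoff.go - minimal sketch: exponential retry backoff of the kind
// visible in the volume mount retry above (delay doubles, then caps).
package main

import (
	"fmt"
	"time"
)

func main() {
	base := 2 * time.Second      // illustrative starting delay (assumption)
	maxDelay := 2 * time.Minute  // illustrative upper bound (assumption)
	delay := base
	for attempt := 1; attempt <= 5; attempt++ {
		fmt.Printf("attempt %d failed; next retry in %s\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}

On the third doubling from a 2s base the delay reaches 8s, which matches the "durationBeforeRetry 8s" printed in the log entry.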
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.718951 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.719011 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.719038 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.719067 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.719079 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.822334 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.822406 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.822425 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.822448 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.822468 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.925920 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.925978 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.925988 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.926010 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:10 crc kubenswrapper[4669]: I1210 15:21:10.926021 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:10Z","lastTransitionTime":"2025-12-10T15:21:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.029082 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.029126 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.029140 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.029159 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.029176 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.132313 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.132354 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.132366 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.132383 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.132394 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.234649 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.234695 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.234705 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.234722 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.234735 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.336978 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.337032 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.337047 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.337072 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.337099 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.439895 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.439994 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.440014 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.440038 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.440060 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.543469 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.543525 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.543536 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.543556 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.543567 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.647410 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.647490 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.647514 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.647549 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.647572 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.751437 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.751499 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.751516 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.751542 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.751558 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.855408 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.855503 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.855533 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.855559 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.855576 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.959743 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.959809 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.959832 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.959860 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:11 crc kubenswrapper[4669]: I1210 15:21:11.959881 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:11Z","lastTransitionTime":"2025-12-10T15:21:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.063334 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.063417 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.063441 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.063473 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.063495 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.166063 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.166113 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.166152 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.166190 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.166205 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.269749 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.269825 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.269843 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.269872 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.269892 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.373030 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.373094 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.373112 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.373136 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.373154 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.397458 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.397487 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.397553 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.397486 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:21:12 crc kubenswrapper[4669]: E1210 15:21:12.397679 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:21:12 crc kubenswrapper[4669]: E1210 15:21:12.397804 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:21:12 crc kubenswrapper[4669]: E1210 15:21:12.397930 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:21:12 crc kubenswrapper[4669]: E1210 15:21:12.398014 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.476861 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.476925 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.476939 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.476967 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.476984 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.580379 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.580447 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.580462 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.580488 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.580506 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.686893 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.686990 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.687016 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.687065 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.687087 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.791106 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.791163 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.791175 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.791197 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.791235 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.894006 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.894075 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.894093 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.894123 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.894142 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.996762 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.996831 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.996849 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.996875 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:12 crc kubenswrapper[4669]: I1210 15:21:12.996893 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:12Z","lastTransitionTime":"2025-12-10T15:21:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.099785 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.099856 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.099890 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.099918 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.099938 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:13Z","lastTransitionTime":"2025-12-10T15:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.202927 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.203003 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.203022 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.203047 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.203066 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:13Z","lastTransitionTime":"2025-12-10T15:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.306336 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.306389 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.306405 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.306427 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.306481 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:13Z","lastTransitionTime":"2025-12-10T15:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.410128 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.410197 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.410267 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.410310 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.410333 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:13Z","lastTransitionTime":"2025-12-10T15:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.513605 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.513654 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.513664 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.513682 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.513697 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:13Z","lastTransitionTime":"2025-12-10T15:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.617698 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.617828 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.617847 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.617871 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.617888 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:13Z","lastTransitionTime":"2025-12-10T15:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.721822 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.721897 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.721918 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.721946 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.721970 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:13Z","lastTransitionTime":"2025-12-10T15:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.824307 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.824374 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.824388 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.824414 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.824432 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:13Z","lastTransitionTime":"2025-12-10T15:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.928405 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.928453 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.928467 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.928487 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:13 crc kubenswrapper[4669]: I1210 15:21:13.928501 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:13Z","lastTransitionTime":"2025-12-10T15:21:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.031617 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.031685 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.031709 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.031737 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.031762 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.135162 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.135288 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.135305 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.135330 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.135352 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.237774 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.237828 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.237844 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.237872 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.237887 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.339697 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.339744 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.339757 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.339774 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.339788 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.397840 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.397883 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.397952 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.397843 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:14 crc kubenswrapper[4669]: E1210 15:21:14.398029 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:14 crc kubenswrapper[4669]: E1210 15:21:14.398152 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:14 crc kubenswrapper[4669]: E1210 15:21:14.398342 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:14 crc kubenswrapper[4669]: E1210 15:21:14.398523 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.443776 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.443842 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.443866 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.443889 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.443901 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.546912 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.546965 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.546985 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.547011 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.547034 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.651047 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.651114 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.651126 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.651150 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.651165 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.754586 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.754656 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.754723 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.754772 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.754836 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.857994 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.858058 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.858069 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.858088 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.858101 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.960881 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.960923 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.960937 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.960958 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:14 crc kubenswrapper[4669]: I1210 15:21:14.960975 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:14Z","lastTransitionTime":"2025-12-10T15:21:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.069117 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.069160 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.069172 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.069393 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.069411 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:15Z","lastTransitionTime":"2025-12-10T15:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.174062 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.174627 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.174840 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.175122 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.175359 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:15Z","lastTransitionTime":"2025-12-10T15:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.278983 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.279048 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.279069 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.279097 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.279118 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:15Z","lastTransitionTime":"2025-12-10T15:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.319002 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.320982 4669 scope.go:117] "RemoveContainer" containerID="1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.349676 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.364558 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.382315 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.382506 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.382597 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.382659 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.382721 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:15Z","lastTransitionTime":"2025-12-10T15:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.394421 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.413974 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.427628 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.444824 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.466649 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.483030 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.484740 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.484778 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.484787 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.484803 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.484812 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:15Z","lastTransitionTime":"2025-12-10T15:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.514262 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"t network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z]\\\\nI1210 15:21:02.345359 5984 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1210 15:21:02.347073 5984 services_controller.go:451] Built service openshift-machine-api/machine-api-controllers cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-controllers_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/mach\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.533461 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.557193 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.573623 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.587164 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.587185 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.587193 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.587205 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 
15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.587239 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:15Z","lastTransitionTime":"2025-12-10T15:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.589557 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-c
ontroller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.601717 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.610019 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.619959 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.634051 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.689533 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.689883 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.689895 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.689910 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.689920 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:15Z","lastTransitionTime":"2025-12-10T15:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.792710 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.792750 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.792759 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.792777 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.792789 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:15Z","lastTransitionTime":"2025-12-10T15:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.820073 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/1.log"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.822791 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763"}
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.823236 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.834130 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.845939 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.866336 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.876875 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.884809 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.894603 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.894645 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.894653 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.894667 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.894679 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:15Z","lastTransitionTime":"2025-12-10T15:21:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.897741 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.917353 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.940252 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.964165 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"t network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z]\\\\nI1210 15:21:02.345359 5984 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1210 15:21:02.347073 5984 services_controller.go:451] Built service openshift-machine-api/machine-api-controllers cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-controllers_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/mach\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.979199 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:15 crc kubenswrapper[4669]: I1210 15:21:15.991839 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:15Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.007814 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.007846 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.007854 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.007869 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.007878 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.021044 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\
\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.033335 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.046108 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.060894 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.075546 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.086719 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.109933 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.109962 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.109972 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.109988 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 
10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.109998 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.212390 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.212422 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.212431 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.212444 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.212454 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.315132 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.315199 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.315210 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.315255 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.315267 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.397508 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:16 crc kubenswrapper[4669]: E1210 15:21:16.397684 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.398074 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:16 crc kubenswrapper[4669]: E1210 15:21:16.398170 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.398295 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:16 crc kubenswrapper[4669]: E1210 15:21:16.398379 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.398471 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:16 crc kubenswrapper[4669]: E1210 15:21:16.398534 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.416266 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.417288 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.417322 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.417334 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.417358 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.417370 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.428730 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.442196 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.456373 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.468944 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.481104 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.499977 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.518075 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.519314 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.519343 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.519354 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.519371 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.519382 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.539724 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"t network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z]\\\\nI1210 15:21:02.345359 5984 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1210 15:21:02.347073 5984 services_controller.go:451] Built service openshift-machine-api/machine-api-controllers cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-controllers_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/mach\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\
":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.557273 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.573845 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.606385 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.623002 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.623039 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.623049 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.623064 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.623076 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.626953 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\
"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.659919 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedA
t\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.706978 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.727177 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.728256 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.728330 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.728349 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.728375 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.728390 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.745290 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:16Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.833590 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.833650 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.833663 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.833685 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.833698 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.936018 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.936070 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.936090 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.936109 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:16 crc kubenswrapper[4669]: I1210 15:21:16.936121 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:16Z","lastTransitionTime":"2025-12-10T15:21:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.079587 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.079991 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.080143 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.080347 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.080491 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:17Z","lastTransitionTime":"2025-12-10T15:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.183205 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.183328 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.183379 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.183437 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.183453 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:17Z","lastTransitionTime":"2025-12-10T15:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.221661 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.221838 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:21:49.22181078 +0000 UTC m=+83.138757407 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.221934 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.222019 4669 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.222059 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:49.222052326 +0000 UTC m=+83.138998953 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.285643 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.285702 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.285712 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.285733 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.285745 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:17Z","lastTransitionTime":"2025-12-10T15:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.346230 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.346301 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.346326 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346363 4669 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346448 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:49.346429159 +0000 UTC m=+83.263375786 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346487 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346514 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346531 4669 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346598 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. 
No retries permitted until 2025-12-10 15:21:49.346579133 +0000 UTC m=+83.263525770 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346641 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346654 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346664 4669 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.346705 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 15:21:49.346687895 +0000 UTC m=+83.263634592 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.388563 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.388624 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.388634 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.388648 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.388658 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:17Z","lastTransitionTime":"2025-12-10T15:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.490882 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.490949 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.490959 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.490974 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.490983 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:17Z","lastTransitionTime":"2025-12-10T15:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.593356 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.593406 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.593417 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.593434 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.593445 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:17Z","lastTransitionTime":"2025-12-10T15:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.695733 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.695779 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.695789 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.695806 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.695820 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:17Z","lastTransitionTime":"2025-12-10T15:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.798302 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.798339 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.798347 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.798362 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.798371 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:17Z","lastTransitionTime":"2025-12-10T15:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.837455 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/2.log" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.838568 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/1.log" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.841876 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763" exitCode=1 Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.841927 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.841968 4669 scope.go:117] "RemoveContainer" containerID="1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.843687 4669 scope.go:117] "RemoveContainer" containerID="5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763" Dec 10 15:21:17 crc kubenswrapper[4669]: E1210 15:21:17.844134 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.870185 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.892681 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9e
b517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"t network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z]\\\\nI1210 15:21:02.345359 5984 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1210 15:21:02.347073 5984 services_controller.go:451] Built service openshift-machine-api/machine-api-controllers cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-controllers_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", 
\\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/mach\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d
1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.900644 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.900683 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.900693 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.900710 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.900721 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:17Z","lastTransitionTime":"2025-12-10T15:21:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.905601 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.922240 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 
2025-08-24T17:21:41Z" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.939288 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/
etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025
-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.950481 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.963463 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.979207 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:17 crc kubenswrapper[4669]: I1210 15:21:17.990545 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.001344 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:17Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.003020 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.003060 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.003069 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.003083 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.003092 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.012937 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:18Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.026261 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:18Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.037081 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:18Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.046709 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:18Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.055459 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:18Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.066804 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:18Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.086209 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:18Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.105429 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.105506 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.105530 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.105563 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.105580 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.208094 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.208154 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.208171 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.208194 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.208212 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.311141 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.311274 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.311305 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.311334 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.311357 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.397051 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.397099 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:18 crc kubenswrapper[4669]: E1210 15:21:18.397162 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.397061 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.397187 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:18 crc kubenswrapper[4669]: E1210 15:21:18.397325 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:18 crc kubenswrapper[4669]: E1210 15:21:18.397390 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:18 crc kubenswrapper[4669]: E1210 15:21:18.397486 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.414740 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.414821 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.414838 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.414857 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.414905 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.517352 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.517390 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.517401 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.517418 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.517430 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.563923 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:18 crc kubenswrapper[4669]: E1210 15:21:18.564114 4669 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:18 crc kubenswrapper[4669]: E1210 15:21:18.564182 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs podName:a72cfbf6-e882-4e1e-8809-b6735aae5dfe nodeName:}" failed. No retries permitted until 2025-12-10 15:21:34.56416374 +0000 UTC m=+68.481110367 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs") pod "network-metrics-daemon-rz9mm" (UID: "a72cfbf6-e882-4e1e-8809-b6735aae5dfe") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.620396 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.620450 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.620463 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.620484 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.620498 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.723004 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.723075 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.723086 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.723101 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.723114 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.825425 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.825478 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.825487 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.825501 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.825509 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.847594 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/2.log" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.928175 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.928444 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.928554 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.928645 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:18 crc kubenswrapper[4669]: I1210 15:21:18.928869 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:18Z","lastTransitionTime":"2025-12-10T15:21:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.031511 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.031581 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.031597 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.031897 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.031932 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.134342 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.134429 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.134452 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.134485 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.134509 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.237469 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.237541 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.237559 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.237587 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.237607 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.341005 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.341059 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.341079 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.341098 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.341112 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.443375 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.443446 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.443466 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.443494 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.443513 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.546176 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.546263 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.546298 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.546323 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.546339 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.650161 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.650254 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.650270 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.650292 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.650308 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.754036 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.754106 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.754130 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.754161 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.754180 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.856198 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.856289 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.856318 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.856344 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.856360 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.959588 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.959656 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.959672 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.959696 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:19 crc kubenswrapper[4669]: I1210 15:21:19.959714 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:19Z","lastTransitionTime":"2025-12-10T15:21:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.062931 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.062975 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.062985 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.063000 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.063017 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.165460 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.165511 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.165523 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.165542 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.165555 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.268112 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.268162 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.268176 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.268197 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.268238 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.308132 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.308173 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.308184 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.308205 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.308234 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.322798 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:20Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.329093 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.329155 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.329173 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.329211 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.329251 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.343934 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:20Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.347426 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.347486 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.347499 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.347518 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.347530 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.358818 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:20Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.362188 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.362246 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.362257 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.362272 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.362284 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.374907 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:20Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.378347 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.378395 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.378407 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.378425 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.378435 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.391259 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:20Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:20Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.391402 4669 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.393026 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.393073 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.393085 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.393103 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.393116 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.397529 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.397544 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.397591 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.397629 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.397645 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.397780 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.397839 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:20 crc kubenswrapper[4669]: E1210 15:21:20.397950 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.495832 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.495883 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.495894 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.495913 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.495926 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.598550 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.598602 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.598617 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.598635 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.598650 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.701097 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.701160 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.701179 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.701204 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.701285 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.804720 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.804803 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.804825 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.804855 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.804877 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.907884 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.907947 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.907970 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.908001 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:20 crc kubenswrapper[4669]: I1210 15:21:20.908022 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:20Z","lastTransitionTime":"2025-12-10T15:21:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.020072 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.020113 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.020124 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.020143 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.020159 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.123625 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.123661 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.123675 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.123691 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.123702 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.226314 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.226344 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.226352 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.226365 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.226375 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.328457 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.328515 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.328529 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.328553 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.328572 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.431978 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.432083 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.432107 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.432135 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.432161 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.535302 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.535332 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.535342 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.535356 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.535366 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.638007 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.638066 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.638088 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.638117 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.638137 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.741195 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.741290 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.741315 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.741341 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.741357 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.844833 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.844905 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.844926 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.844954 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.844980 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.959882 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.959938 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.959955 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.959975 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:21 crc kubenswrapper[4669]: I1210 15:21:21.959992 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:21Z","lastTransitionTime":"2025-12-10T15:21:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.050410 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.068687 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.069009 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.069477 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.069656 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.069797 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.073672 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.079517 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"20
25-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.100950 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.115537 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.125315 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.136654 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.146766 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.158319 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.171845 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.171888 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.171899 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.171916 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.171926 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.175849 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.186751 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.196130 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.205357 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.217819 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.236724 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.250240 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.270415 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f
966db3a3455c7829b397e763\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"t network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z]\\\\nI1210 15:21:02.345359 5984 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1210 15:21:02.347073 5984 services_controller.go:451] Built service openshift-machine-api/machine-api-controllers cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-controllers_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/mach\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 
ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",
\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.275011 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.275067 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.275080 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.275105 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.275122 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.283015 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.299045 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:22Z is after 
2025-08-24T17:21:41Z" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.377605 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.377644 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.377655 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.377671 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.377681 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.397054 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.397115 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:22 crc kubenswrapper[4669]: E1210 15:21:22.397189 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.397240 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.397070 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:22 crc kubenswrapper[4669]: E1210 15:21:22.397358 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:22 crc kubenswrapper[4669]: E1210 15:21:22.397417 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:22 crc kubenswrapper[4669]: E1210 15:21:22.397485 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.479824 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.479887 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.479898 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.479916 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.479932 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.583040 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.583104 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.583128 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.583158 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.583180 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.686603 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.686674 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.686697 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.686727 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.686748 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.788879 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.788923 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.788930 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.788944 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.788954 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.892423 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.892480 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.892488 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.892502 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.892522 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.994959 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.995005 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.995016 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.995038 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:22 crc kubenswrapper[4669]: I1210 15:21:22.995051 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:22Z","lastTransitionTime":"2025-12-10T15:21:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.098235 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.098277 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.098287 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.098302 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.098314 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:23Z","lastTransitionTime":"2025-12-10T15:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.201032 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.201065 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.201075 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.201088 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.201098 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:23Z","lastTransitionTime":"2025-12-10T15:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.302716 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.302749 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.302757 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.302771 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.302779 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:23Z","lastTransitionTime":"2025-12-10T15:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.405401 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.405449 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.405463 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.405480 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.405493 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:23Z","lastTransitionTime":"2025-12-10T15:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.507831 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.507874 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.507884 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.507902 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.507913 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:23Z","lastTransitionTime":"2025-12-10T15:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.611052 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.611108 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.611120 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.611141 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.611156 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:23Z","lastTransitionTime":"2025-12-10T15:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.715557 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.715633 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.715652 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.715675 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.715696 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:23Z","lastTransitionTime":"2025-12-10T15:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.817609 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.817650 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.817658 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.817671 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.817682 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:23Z","lastTransitionTime":"2025-12-10T15:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.920651 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.920683 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.920698 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.920712 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:23 crc kubenswrapper[4669]: I1210 15:21:23.920721 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:23Z","lastTransitionTime":"2025-12-10T15:21:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.022641 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.022681 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.022688 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.022702 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.022712 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.126007 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.126070 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.126093 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.126122 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.126144 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.229142 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.229184 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.229195 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.229211 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.229244 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.331172 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.331235 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.331252 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.331276 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.331293 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.397478 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.397504 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:24 crc kubenswrapper[4669]: E1210 15:21:24.397974 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.397579 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.397560 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:24 crc kubenswrapper[4669]: E1210 15:21:24.398149 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:24 crc kubenswrapper[4669]: E1210 15:21:24.398290 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:24 crc kubenswrapper[4669]: E1210 15:21:24.397860 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.435010 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.435080 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.435094 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.435114 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.435125 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.538428 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.538552 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.538575 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.538608 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.538646 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.642099 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.642181 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.642204 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.642272 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.642300 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.744081 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.744125 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.744134 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.744148 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.744159 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.847381 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.847440 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.847560 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.847599 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.847619 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.950709 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.950767 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.950782 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.950804 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:24 crc kubenswrapper[4669]: I1210 15:21:24.950818 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:24Z","lastTransitionTime":"2025-12-10T15:21:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.054175 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.054298 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.054331 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.054360 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.054379 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.157499 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.157597 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.157617 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.157678 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.157697 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.261670 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.261752 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.261774 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.261801 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.261822 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.364313 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.364430 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.364447 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.364468 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.364482 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.467382 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.467488 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.467512 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.467581 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.467604 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.570830 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.570885 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.570902 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.570927 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.570950 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.674769 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.674819 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.674836 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.674859 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.674905 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.777796 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.777864 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.777888 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.777919 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.777944 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.880749 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.880838 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.880863 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.880887 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.880903 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.983941 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.983985 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.984001 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.984025 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:25 crc kubenswrapper[4669]: I1210 15:21:25.984043 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:25Z","lastTransitionTime":"2025-12-10T15:21:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.088349 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.088405 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.088426 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.088449 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.088465 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:26Z","lastTransitionTime":"2025-12-10T15:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.192582 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.192641 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.192658 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.192683 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.192700 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:26Z","lastTransitionTime":"2025-12-10T15:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.295047 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.295083 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.295094 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.295107 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.295116 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:26Z","lastTransitionTime":"2025-12-10T15:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.397494 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:26 crc kubenswrapper[4669]: E1210 15:21:26.397921 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.397629 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:26 crc kubenswrapper[4669]: E1210 15:21:26.398034 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.397555 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:26 crc kubenswrapper[4669]: E1210 15:21:26.398125 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.397638 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:26 crc kubenswrapper[4669]: E1210 15:21:26.398244 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.398714 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.398754 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.398771 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.398794 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.398811 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:26Z","lastTransitionTime":"2025-12-10T15:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.424394 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.446934 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.466769 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.484880 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.501906 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.502203 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.502278 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.502296 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.502325 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.502360 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:26Z","lastTransitionTime":"2025-12-10T15:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.518405 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.537367 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.564564 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f
966db3a3455c7829b397e763\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1f9b312123e959e7574cfa4d4538b5e50d2dcc836b8276409d0a4470414daed0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"t network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:02Z is after 2025-08-24T17:21:41Z]\\\\nI1210 15:21:02.345359 5984 base_network_controller_pods.go:477] [default/openshift-network-diagnostics/network-check-source-55646444c4-trplf] creating logical port openshift-network-diagnostics_network-check-source-55646444c4-trplf for pod on switch crc\\\\nI1210 15:21:02.347073 5984 services_controller.go:451] Built service openshift-machine-api/machine-api-controllers cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-api/machine-api-controllers_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/mach\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:00Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 
ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",
\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.575206 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.590361 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.605015 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.605055 4669 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.605064 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.605092 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.605102 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:26Z","lastTransitionTime":"2025-12-10T15:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.613184 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountP
ath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-
12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.628613 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.642009 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.656648 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.670302 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.683623 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.694996 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.708499 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.708559 4669 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.708585 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.708610 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.708629 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:26Z","lastTransitionTime":"2025-12-10T15:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.710511 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cf80d68-31ee-4750-a96a-3b66b4ce7c07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e6d75bf5ff884fec59058a141854a7f9222a8337f86b56098851e30a0f4d6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5248cf34928a68839c3f4da115b0009ef1b4f6fc313018a6f60344c2ee8f5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88908b68f1b6c8e8ee6cf65e57077640c6cf07c6f975a1b5c4d6e5b11c7602e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:26Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.810897 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.810943 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.810955 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.810971 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.810986 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:26Z","lastTransitionTime":"2025-12-10T15:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.913121 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.913167 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.913178 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.913198 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:26 crc kubenswrapper[4669]: I1210 15:21:26.913210 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:26Z","lastTransitionTime":"2025-12-10T15:21:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.016005 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.016043 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.016051 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.016064 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.016074 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.118626 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.118666 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.118674 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.118690 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.118700 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.220901 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.220961 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.220980 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.221004 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.221019 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.323683 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.323731 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.323747 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.323767 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.323781 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.426497 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.426541 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.426560 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.426584 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.426603 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.529361 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.529400 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.529410 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.529426 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.529437 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.632363 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.632424 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.632442 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.632468 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.632486 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.735081 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.735484 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.735559 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.735642 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.735721 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.839457 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.839496 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.839508 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.839536 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.839546 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.942708 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.942771 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.942789 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.942815 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:27 crc kubenswrapper[4669]: I1210 15:21:27.942833 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:27Z","lastTransitionTime":"2025-12-10T15:21:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.047884 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.047943 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.047959 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.047983 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.048001 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.150842 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.150899 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.150921 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.150949 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.150974 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.253416 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.253458 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.253472 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.253488 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.253500 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.356348 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.356408 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.356424 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.356446 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.356462 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.397885 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:28 crc kubenswrapper[4669]: E1210 15:21:28.398067 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.398927 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.399166 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.399422 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:28 crc kubenswrapper[4669]: E1210 15:21:28.399572 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:28 crc kubenswrapper[4669]: E1210 15:21:28.399855 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:28 crc kubenswrapper[4669]: E1210 15:21:28.400282 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.400604 4669 scope.go:117] "RemoveContainer" containerID="5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763" Dec 10 15:21:28 crc kubenswrapper[4669]: E1210 15:21:28.400983 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.421680 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.437322 4669 
status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.452994 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.458967 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.459006 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.459019 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.459038 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.459049 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.467250 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cf80d68-31ee-4750-a96a-3b66b4ce7c07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e6d75bf5ff884fec59058a141854a7f9222a8337f86b56098851e30a0f4d6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5248cf34928a68839c3f4da115b0009ef1b4f6fc313018a6f60344c2ee8f5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMoun
ts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88908b68f1b6c8e8ee6cf65e57077640c6cf07c6f975a1b5c4d6e5b11c7602e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.483203 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.499188 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.515795 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.527377 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.539100 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.550036 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.561532 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.561591 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.561607 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.561630 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.561648 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.568703 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.584180 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.599433 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.617054 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.637839 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-1
0T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135
adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.652043 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.663807 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.664326 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.664411 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.664501 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.664577 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.672907 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.686100 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:28Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.766681 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.766715 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.766726 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.766747 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.766763 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.869674 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.870028 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.870040 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.870056 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.870088 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.973166 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.973243 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.973259 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.973283 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:28 crc kubenswrapper[4669]: I1210 15:21:28.973297 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:28Z","lastTransitionTime":"2025-12-10T15:21:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.076071 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.076128 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.076145 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.076166 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.076183 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:29Z","lastTransitionTime":"2025-12-10T15:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.179203 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.179307 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.179321 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.179341 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.179353 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:29Z","lastTransitionTime":"2025-12-10T15:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.282427 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.282481 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.282493 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.282511 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.282527 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:29Z","lastTransitionTime":"2025-12-10T15:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.385315 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.385358 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.385373 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.385392 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.385405 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:29Z","lastTransitionTime":"2025-12-10T15:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.488122 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.488177 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.488191 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.488214 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.488249 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:29Z","lastTransitionTime":"2025-12-10T15:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.590718 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.591160 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.591260 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.591337 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.591400 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:29Z","lastTransitionTime":"2025-12-10T15:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.694581 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.695071 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.695196 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.695317 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.695395 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:29Z","lastTransitionTime":"2025-12-10T15:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.798815 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.799259 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.799352 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.799439 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.799510 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:29Z","lastTransitionTime":"2025-12-10T15:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.901560 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.901657 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.901680 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.901718 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:29 crc kubenswrapper[4669]: I1210 15:21:29.901740 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:29Z","lastTransitionTime":"2025-12-10T15:21:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.008125 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.008192 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.008213 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.008259 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.008311 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.111728 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.111805 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.111818 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.111837 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.111850 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.213778 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.213824 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.213835 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.213850 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.213860 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.316949 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.317336 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.317453 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.317547 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.317612 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.397654 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.398303 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.398431 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.398479 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.398568 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.398752 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.398867 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.399194 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.420331 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.420396 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.420408 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.420425 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.420438 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.484745 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.484777 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.484788 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.484802 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.484811 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.504566 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:30Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.509075 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.509129 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.509146 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.509169 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.509183 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.523649 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:30Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.527480 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.527525 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.527537 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.527555 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.527567 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.541718 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:30Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.546151 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.546181 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.546191 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.546207 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.546233 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.559633 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:30Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.563634 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.563679 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.563687 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.563701 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.563711 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.577674 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:30Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:30 crc kubenswrapper[4669]: E1210 15:21:30.577804 4669 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.579867 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.579908 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.579917 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.579949 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.579962 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.683628 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.683708 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.683726 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.683752 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.683771 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.787480 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.787554 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.787568 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.787593 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.787607 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.891008 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.891058 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.891070 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.891093 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.891129 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.993532 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.993586 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.993602 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.993625 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:30 crc kubenswrapper[4669]: I1210 15:21:30.993640 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:30Z","lastTransitionTime":"2025-12-10T15:21:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.096750 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.096799 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.096811 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.096827 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.096838 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:31Z","lastTransitionTime":"2025-12-10T15:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.200033 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.200073 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.200083 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.200121 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.200131 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:31Z","lastTransitionTime":"2025-12-10T15:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.302518 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.302576 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.302588 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.302610 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.302622 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:31Z","lastTransitionTime":"2025-12-10T15:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.404973 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.405027 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.405038 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.405063 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.405075 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:31Z","lastTransitionTime":"2025-12-10T15:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.510848 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.510908 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.510922 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.510950 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.510967 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:31Z","lastTransitionTime":"2025-12-10T15:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.614609 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.614698 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.614713 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.614736 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.614748 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:31Z","lastTransitionTime":"2025-12-10T15:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.717681 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.717737 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.717754 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.717774 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.717787 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:31Z","lastTransitionTime":"2025-12-10T15:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.820923 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.821320 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.821402 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.821489 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.821558 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:31Z","lastTransitionTime":"2025-12-10T15:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.924746 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.924801 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.924812 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.924831 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:31 crc kubenswrapper[4669]: I1210 15:21:31.924841 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:31Z","lastTransitionTime":"2025-12-10T15:21:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.027117 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.027181 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.027192 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.027209 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.027234 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.130240 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.130286 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.130296 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.130314 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.130326 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.233689 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.234166 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.234298 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.234406 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.234507 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.337988 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.338045 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.338057 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.338079 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.338091 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.397870 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.397942 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.397890 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:32 crc kubenswrapper[4669]: E1210 15:21:32.398037 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:32 crc kubenswrapper[4669]: E1210 15:21:32.398178 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:32 crc kubenswrapper[4669]: E1210 15:21:32.398291 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.398451 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:32 crc kubenswrapper[4669]: E1210 15:21:32.398593 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.442177 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.442242 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.442254 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.442276 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.442292 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.545472 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.545524 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.545534 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.545557 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.545569 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.648466 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.648506 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.648515 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.648530 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.648539 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.752185 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.752243 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.752255 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.752273 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.752287 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.855134 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.855189 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.855206 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.855249 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.855266 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.957400 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.957684 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.957784 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.957853 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:32 crc kubenswrapper[4669]: I1210 15:21:32.957917 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:32Z","lastTransitionTime":"2025-12-10T15:21:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[... the same five-line node-status block (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats 14 more times at roughly 100 ms intervals, from 15:21:33.060783 through 15:21:34.395985; only the timestamps differ ...]
Dec 10 15:21:34 crc kubenswrapper[4669]: I1210 15:21:34.397144 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:21:34 crc kubenswrapper[4669]: I1210 15:21:34.397339 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:21:34 crc kubenswrapper[4669]: I1210 15:21:34.397378 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:21:34 crc kubenswrapper[4669]: I1210 15:21:34.397446 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:21:34 crc kubenswrapper[4669]: E1210 15:21:34.397516 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:21:34 crc kubenswrapper[4669]: E1210 15:21:34.397646 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:21:34 crc kubenswrapper[4669]: E1210 15:21:34.397710 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:21:34 crc kubenswrapper[4669]: E1210 15:21:34.397780 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:21:34 crc kubenswrapper[4669]: I1210 15:21:34.411077 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
[... node-status block repeated at 15:21:34.499447 and at 15:21:34.602650 ...]
Dec 10 15:21:34 crc kubenswrapper[4669]: I1210 15:21:34.638200 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:21:34 crc kubenswrapper[4669]: E1210 15:21:34.638412 4669 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 10 15:21:34 crc kubenswrapper[4669]: E1210 15:21:34.638526 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs podName:a72cfbf6-e882-4e1e-8809-b6735aae5dfe nodeName:}" failed. No retries permitted until 2025-12-10 15:22:06.638499604 +0000 UTC m=+100.555446321 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs") pod "network-metrics-daemon-rz9mm" (UID: "a72cfbf6-e882-4e1e-8809-b6735aae5dfe") : object "openshift-multus"/"metrics-daemon-secret" not registered
[... node-status block repeated at 15:21:34.705748 and at 15:21:34.808157 ...]
[... node-status block repeated 15 times at roughly 100 ms intervals, from 15:21:34.910345 through 15:21:36.349078 ...]
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.397486 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.397510 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.397545 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.397497 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:21:36 crc kubenswrapper[4669]: E1210 15:21:36.397615 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:21:36 crc kubenswrapper[4669]: E1210 15:21:36.397693 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:21:36 crc kubenswrapper[4669]: E1210 15:21:36.397761 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:21:36 crc kubenswrapper[4669]: E1210 15:21:36.397873 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.407371 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.418404 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.426681 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.437638 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cf80d68-31ee-4750-a96a-3b66b4ce7c07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e6d75bf5ff884fec59058a141854a7f9222a8337f86b56098851e30a0f4d6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5248cf34928a68839c3f4da115b0009ef1b4f6fc313018a6f60344c2ee8f5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88908b68f1b6c8e8ee6cf65e57077640c6cf07c6f975a1b5c4d6e5b11c7602e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z"
[... node-status block repeated at 15:21:36.450882 ...]
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.451004 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z"
Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.463125 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.475379 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.483978 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.495130 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.505861 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.515415 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.529257 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.540349 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9221c776-2bb7-4936-8760-7c99211087aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ed05214809a23e10476b1d4cff9928185e996ae5fbf9b87237f9b2cee3afb1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.552852 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.552903 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.552915 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.552953 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.552967 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:36Z","lastTransitionTime":"2025-12-10T15:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.553320 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubel
et\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.566499 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/c
ni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d
6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.584935 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\
\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restar
tCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.596407 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.613920 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f
966db3a3455c7829b397e763\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.624411 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:36Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.655081 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.655172 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.655185 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.655203 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.655228 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:36Z","lastTransitionTime":"2025-12-10T15:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.757537 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.757602 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.757615 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.757634 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.757648 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:36Z","lastTransitionTime":"2025-12-10T15:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.859307 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.859336 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.859347 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.859359 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.859369 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:36Z","lastTransitionTime":"2025-12-10T15:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.961870 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.961912 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.961921 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.961937 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:36 crc kubenswrapper[4669]: I1210 15:21:36.961946 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:36Z","lastTransitionTime":"2025-12-10T15:21:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.064072 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.064118 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.064127 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.064141 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.064152 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:37Z","lastTransitionTime":"2025-12-10T15:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.166263 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.166293 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.166302 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.166316 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.166326 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:37Z","lastTransitionTime":"2025-12-10T15:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.268447 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.268477 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.268487 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.268505 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.268514 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:37Z","lastTransitionTime":"2025-12-10T15:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.370723 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.370767 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.370778 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.370792 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.370802 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:37Z","lastTransitionTime":"2025-12-10T15:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.485479 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.485564 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.485608 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.485634 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.485647 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:37Z","lastTransitionTime":"2025-12-10T15:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.588481 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.588533 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.588548 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.588566 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.588579 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:37Z","lastTransitionTime":"2025-12-10T15:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.693418 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.693451 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.693460 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.693475 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.693484 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:37Z","lastTransitionTime":"2025-12-10T15:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.795536 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.795581 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.795590 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.795604 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.795615 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:37Z","lastTransitionTime":"2025-12-10T15:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.898786 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.899154 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.899168 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.899188 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.899201 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:37Z","lastTransitionTime":"2025-12-10T15:21:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.914717 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/0.log" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.914763 4669 generic.go:334] "Generic (PLEG): container finished" podID="3dda8be1-e5bc-42a3-820e-4285b75bf8c2" containerID="96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c" exitCode=1 Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.914793 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s4g62" event={"ID":"3dda8be1-e5bc-42a3-820e-4285b75bf8c2","Type":"ContainerDied","Data":"96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c"} Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.915180 4669 scope.go:117] "RemoveContainer" containerID="96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.948101 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/en
v\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:37Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.961953 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:37Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.973530 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:37Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:37 crc kubenswrapper[4669]: I1210 15:21:37.989873 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:37Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.002345 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.002390 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.002401 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.002420 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.002430 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.004476 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.017301 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9221c776-2bb7-4936-8760-7c99211087aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ed05214809a23e10476b1d4cff9928185e996ae5fbf9b87237f9b2cee3afb1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.030132 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:37Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:37Z\\\",\\\"message\\\":\\\"2025-12-10T15:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9\\\\n2025-12-10T15:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9 to /host/opt/cni/bin/\\\\n2025-12-10T15:20:52Z [verbose] multus-daemon started\\\\n2025-12-10T15:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-10T15:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.051705 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.063327 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.085926 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.104270 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.104323 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc 
kubenswrapper[4669]: I1210 15:21:38.104335 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.104352 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.104364 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.106371 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mou
ntPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.118985 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.133450 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.147461 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.160108 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.171788 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.183059 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.195328 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cf80d68-31ee-4750-a96a-3b66b4ce7c07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e6d75bf5ff884fec59058a141854a7f9222a8337f86b56098851e30a0f4d6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5248cf34928a68839c3f4da115b0009ef1b4f6fc313018a6f60344c2ee8f5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88908b68f1b6c8e8ee6cf65e57077640c6cf07c6f975a1b5c4d6e5b11c7602e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.206966 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.207035 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.207046 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.207063 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.207076 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.210002 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.309978 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.310021 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.310032 4669 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.310045 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.310054 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.397989 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:38 crc kubenswrapper[4669]: E1210 15:21:38.398172 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.398498 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:38 crc kubenswrapper[4669]: E1210 15:21:38.398608 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.398798 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:38 crc kubenswrapper[4669]: E1210 15:21:38.398891 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.399271 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:38 crc kubenswrapper[4669]: E1210 15:21:38.399518 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.412374 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.412438 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.412448 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.412484 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.412496 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.515368 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.515411 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.515424 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.515468 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.515481 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.618165 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.618199 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.618207 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.618235 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.618248 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.725240 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.725292 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.725303 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.725322 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.725334 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.827692 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.827726 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.827736 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.827751 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.827763 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.923145 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/0.log" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.923296 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s4g62" event={"ID":"3dda8be1-e5bc-42a3-820e-4285b75bf8c2","Type":"ContainerStarted","Data":"235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.934365 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.934437 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.934448 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.934477 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.934495 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:38Z","lastTransitionTime":"2025-12-10T15:21:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.937953 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9221c776-2bb7-4936-8760-7c99211087aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ed05214809a23e10476b1d4cff9928185e996ae5fbf9b87237f9b2cee3afb1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.950630 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:37Z\\\",\\\"message\\\":\\\"2025-12-10T15:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9\\\\n2025-12-10T15:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9 to /host/opt/cni/bin/\\\\n2025-12-10T15:20:52Z [verbose] multus-daemon started\\\\n2025-12-10T15:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-10T15:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.970206 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bb
a1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:38 crc kubenswrapper[4669]: I1210 15:21:38.983026 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:38Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.004585 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f
966db3a3455c7829b397e763\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.018467 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.037590 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.037629 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.037639 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.037539 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.037658 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.037844 4669 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.050824 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.066440 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cf80d68-31ee-4750-a96a-3b66b4ce7c07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e6d75bf5ff884fec59058a141854a7f9222a8337f86b56098851e30a0f4d6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5248cf34928a68839c3f4da115b0009ef1b4f6fc313018a6f60344c2ee8f5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88908b68f1b6c8e8ee6cf65e57077640c6cf07c6f975a1b5c4d6e5b11c7602e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.081099 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-
o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791
fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.092090 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"202
5-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.110081 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.124280 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.136395 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.139664 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.139706 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.139719 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.139738 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.139749 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.149274 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.163781 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.178950 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.192091 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.203912 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:39Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.242271 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.242301 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.242310 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.242327 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.242338 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.345424 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.345480 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.345496 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.345522 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.345539 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.448587 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.448636 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.448648 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.448667 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.448682 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.550901 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.550934 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.550942 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.550957 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.550967 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.652854 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.652898 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.652907 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.652925 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.652939 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.755665 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.755709 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.755718 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.755736 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.755748 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.858751 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.858827 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.858845 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.858871 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.858889 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.961498 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.961560 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.961574 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.961595 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:39 crc kubenswrapper[4669]: I1210 15:21:39.961609 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:39Z","lastTransitionTime":"2025-12-10T15:21:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.063704 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.063751 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.063763 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.063782 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.063795 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.166763 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.166818 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.166832 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.166849 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.166860 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.269611 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.269662 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.269671 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.269687 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.269699 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.372998 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.373053 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.373066 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.373088 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.373101 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.397782 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.397828 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.397838 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.397785 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.397967 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.398106 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.398352 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.398491 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.476462 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.476524 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.476538 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.476556 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.476569 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.578878 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.578956 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.578971 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.579002 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.579018 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.584392 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.584449 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.584461 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.584481 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.584497 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.600187 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:40Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.605677 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.605725 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.605739 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.605779 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.605792 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.621750 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:40Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.626038 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.626111 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.626125 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.626145 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.626178 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.641670 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:40Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.646647 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.646701 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.646711 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.646732 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.646746 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.661210 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:40Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.665469 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.665499 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.665509 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.665527 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.665539 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.678874 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"4f7987ab-4a09-457d-8b66-5542ead6568f\\\",\\\"systemUUID\\\":\\\"014cfcb6-977c-4f8f-a8ab-18a9d298357b\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:40Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:40 crc kubenswrapper[4669]: E1210 15:21:40.679032 4669 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.681475 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.681500 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.681510 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.681525 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.681535 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.783847 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.783901 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.783916 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.783934 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.783947 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.886325 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.886373 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.886388 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.886406 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.886418 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.989924 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.989966 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.989980 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.989997 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:40 crc kubenswrapper[4669]: I1210 15:21:40.990010 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:40Z","lastTransitionTime":"2025-12-10T15:21:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.092348 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.092394 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.092408 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.092426 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.092445 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:41Z","lastTransitionTime":"2025-12-10T15:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.195204 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.195319 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.195344 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.195375 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.195404 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:41Z","lastTransitionTime":"2025-12-10T15:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.298284 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.298349 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.298367 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.298395 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.298420 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:41Z","lastTransitionTime":"2025-12-10T15:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.401508 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.401542 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.401553 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.401572 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.401583 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:41Z","lastTransitionTime":"2025-12-10T15:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.504188 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.504254 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.504264 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.504279 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.504289 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:41Z","lastTransitionTime":"2025-12-10T15:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.606049 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.606084 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.606092 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.606105 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.606114 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:41Z","lastTransitionTime":"2025-12-10T15:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.708969 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.709005 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.709013 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.709026 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.709034 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:41Z","lastTransitionTime":"2025-12-10T15:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.812610 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.812657 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.812669 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.812687 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.812699 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:41Z","lastTransitionTime":"2025-12-10T15:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.915309 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.915343 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.915351 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.915365 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:41 crc kubenswrapper[4669]: I1210 15:21:41.915375 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:41Z","lastTransitionTime":"2025-12-10T15:21:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.018000 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.018063 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.018079 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.018131 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.018147 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.121041 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.121371 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.121444 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.121511 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.121581 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.224386 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.224447 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.224462 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.224487 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.224505 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.327461 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.327538 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.327560 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.327588 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.327610 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.398198 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.398198 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.398319 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.398366 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:42 crc kubenswrapper[4669]: E1210 15:21:42.398533 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:42 crc kubenswrapper[4669]: E1210 15:21:42.398788 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:42 crc kubenswrapper[4669]: E1210 15:21:42.398845 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:42 crc kubenswrapper[4669]: E1210 15:21:42.398954 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.400323 4669 scope.go:117] "RemoveContainer" containerID="5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.435772 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.436509 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.436631 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.436726 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.436826 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.540145 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.540590 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.540677 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.540821 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.541007 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.645840 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.645900 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.645915 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.645938 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.645955 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.749405 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.749843 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.749943 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.750041 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.750128 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.853200 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.853294 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.853306 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.853322 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.853334 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.937938 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/2.log" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.941450 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.942639 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.955635 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.955689 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.955704 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.955729 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.955744 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:42Z","lastTransitionTime":"2025-12-10T15:21:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.964596 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:42Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:42 crc kubenswrapper[4669]: I1210 15:21:42.978330 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:42Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:42.999849 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://491f95c8936085131d8f242b89d15aa9d10fb347
8242014b214bc73aa9d78904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, 
handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:42Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.023610 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.038405 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.059376 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.059446 4669 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.059456 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.059495 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.059508 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:43Z","lastTransitionTime":"2025-12-10T15:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.064640 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cf80d68-31ee-4750-a96a-3b66b4ce7c07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e6d75bf5ff884fec59058a141854a7f9222a8337f86b56098851e30a0f4d6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5248cf34928a68839c3f4da115b0009ef1b4f6fc313018a6f60344c2ee8f5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88908b68f1b6c8e8ee6cf65e57077640c6cf07c6f975a1b5c4d6e5b11c7602e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.093826 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.107985 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.124833 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.136452 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.147016 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.158981 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.162117 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.162168 4669 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.162178 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.162197 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.162209 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:43Z","lastTransitionTime":"2025-12-10T15:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.173637 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.185402 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.197265 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.208496 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.220256 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.231011 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9221c776-2bb7-4936-8760-7c99211087aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ed05214809a23e10476b1d4cff9928185e996ae5fbf9b87237f9b2cee3afb1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.242899 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:37Z\\\",\\\"message\\\":\\\"2025-12-10T15:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9\\\\n2025-12-10T15:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9 to /host/opt/cni/bin/\\\\n2025-12-10T15:20:52Z [verbose] multus-daemon started\\\\n2025-12-10T15:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-10T15:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:43Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.302822 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.302868 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.302877 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.302894 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.302909 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:43Z","lastTransitionTime":"2025-12-10T15:21:43Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.478062 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.478090 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.478101 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.478115 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.478123 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:43Z","lastTransitionTime":"2025-12-10T15:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.580778 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.580816 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.580825 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.580841 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.580854 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:43Z","lastTransitionTime":"2025-12-10T15:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.720027 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.720069 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.720079 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.720096 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.720105 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:43Z","lastTransitionTime":"2025-12-10T15:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.822633 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.822970 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.823047 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.823127 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.823194 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:43Z","lastTransitionTime":"2025-12-10T15:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.925356 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.925646 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.925724 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.925789 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:43 crc kubenswrapper[4669]: I1210 15:21:43.925846 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:43Z","lastTransitionTime":"2025-12-10T15:21:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.028612 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.028894 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.029025 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.029156 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.029330 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:44Z","lastTransitionTime":"2025-12-10T15:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.166208 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.166262 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.166270 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.166284 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.166294 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:44Z","lastTransitionTime":"2025-12-10T15:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.267917 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.267955 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.267964 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.267977 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.267988 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:44Z","lastTransitionTime":"2025-12-10T15:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.370683 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.370750 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.370759 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.370781 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.370792 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:44Z","lastTransitionTime":"2025-12-10T15:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.397191 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.397341 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.397473 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:44 crc kubenswrapper[4669]: E1210 15:21:44.397360 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:44 crc kubenswrapper[4669]: E1210 15:21:44.397588 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.397642 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:44 crc kubenswrapper[4669]: E1210 15:21:44.397779 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:44 crc kubenswrapper[4669]: E1210 15:21:44.397869 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.472935 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.473268 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.473353 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.473424 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.473501 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:44Z","lastTransitionTime":"2025-12-10T15:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.576817 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.576846 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.576854 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.576868 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.576876 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:44Z","lastTransitionTime":"2025-12-10T15:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.678962 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.678989 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.678997 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.679010 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.679019 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:44Z","lastTransitionTime":"2025-12-10T15:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.857047 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.857093 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.857105 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.857123 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.857136 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:44Z","lastTransitionTime":"2025-12-10T15:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.960042 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.960463 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.960760 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.960916 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:44 crc kubenswrapper[4669]: I1210 15:21:44.961047 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:44Z","lastTransitionTime":"2025-12-10T15:21:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.064706 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.064754 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.064767 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.064789 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.064816 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.167518 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.167554 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.167565 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.167579 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.167589 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.270179 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.270247 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.270260 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.270279 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.270291 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.374196 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.374283 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.374297 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.374318 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.374338 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.477117 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.477184 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.477244 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.477298 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.477324 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.580275 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.580331 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.580341 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.580358 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.580369 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.684137 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.684205 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.684264 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.684302 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.684319 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.786981 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.787031 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.787042 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.787061 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.787073 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.890163 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.890247 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.890261 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.890276 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.890287 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.982450 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/3.log" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.983546 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/2.log" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.988181 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904" exitCode=1 Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.988309 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904"} Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.988401 4669 scope.go:117] "RemoveContainer" containerID="5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.990565 4669 scope.go:117] "RemoveContainer" containerID="491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904" Dec 10 15:21:45 crc kubenswrapper[4669]: E1210 15:21:45.990881 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.997932 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.997982 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.997999 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.998024 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:45 crc kubenswrapper[4669]: I1210 15:21:45.998041 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:45Z","lastTransitionTime":"2025-12-10T15:21:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.016244 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.031146 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.056180 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://491f95c8936085131d8f242b89d15aa9d10fb347
8242014b214bc73aa9d78904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:45Z\\\",\\\"message\\\":\\\"1210 15:21:44.932508 6549 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 15:21:44.932752 6549 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 15:21:44.932792 6549 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 15:21:44.934936 6549 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 15:21:44.935627 6549 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1210 15:21:44.935644 6549 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1210 15:21:44.935658 6549 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1210 15:21:44.935663 6549 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1210 15:21:44.935677 6549 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1210 15:21:44.935692 6549 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 15:21:44.935728 6549 factory.go:656] Stopping watch 
factory\\\\nI1210 15:21:44.935742 6549 ovnkube.go:599] Stopped ovnkube\\\\nI1210 15:21:44.935761 6549 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.069415 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.082453 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.094839 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cf80d68-31ee-4750-a96a-3b66b4ce7c07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e6d75bf5ff884fec59058a141854a7f9222a8337f86b56098851e30a0f4d6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5248cf34928a68839c3f4da115b0009ef1b4f6fc313018a6f60344c2ee8f5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88908b68f1b6c8e8ee6cf65e57077640c6cf07c6f975a1b5c4d6e5b11c7602e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.101451 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.101646 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.101760 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.101849 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.101932 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:46Z","lastTransitionTime":"2025-12-10T15:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.110679 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.124001 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.139158 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.150989 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.165791 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.178144 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.190242 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.202371 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.205440 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.205485 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.205497 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.205514 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.205524 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:46Z","lastTransitionTime":"2025-12-10T15:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.218132 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.230135 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.241499 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.254638 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9221c776-2bb7-4936-8760-7c99211087aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ed05214809a23e10476b1d4cff9928185e996ae5fbf9b87237f9b2cee3afb1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.266818 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:37Z\\\",\\\"message\\\":\\\"2025-12-10T15:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9\\\\n2025-12-10T15:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9 to /host/opt/cni/bin/\\\\n2025-12-10T15:20:52Z [verbose] multus-daemon started\\\\n2025-12-10T15:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-10T15:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.307971 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.308029 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.308040 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.308057 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.308071 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:46Z","lastTransitionTime":"2025-12-10T15:21:46Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.397360 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.397457 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.397384 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:46 crc kubenswrapper[4669]: E1210 15:21:46.397532 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:46 crc kubenswrapper[4669]: E1210 15:21:46.397601 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:46 crc kubenswrapper[4669]: E1210 15:21:46.397713 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.397780 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:46 crc kubenswrapper[4669]: E1210 15:21:46.397975 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.410285 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.410397 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.410421 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.410449 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.410473 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:46Z","lastTransitionTime":"2025-12-10T15:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.422052 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1e469543-9bc7-4ec2-9123-bac2b16d1800\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ff9c3ca10fd3bda5a907a79c068f2fe39021033aae6bd33a8dc3e3902ddd9222\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1ddcd2021f68d2af4761282cf9176ab6e9537ac7c8af621b5b60a9c07d620197\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a
67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b4a0adbf709db8e4116df71e68183f5264e7482c61e1f2930c3a30d3ad3cdb8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://65165ba177b70aa3b92777f04c8f01b590201bba1978921dcdac31a33ff91095\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a716e4eecf40d1924519e92b0a22cf79e9eb9d69f0b7d5d2b64b58f43be164cc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://984fc6052674d0dd1fe5c20dc94c9fe8841110f977500432fe74ea7564c51871\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b6c62ad1ffe26a1b9410fffc602db15803f99eb135adfbb439e396e71b16d46\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0b7de8f4a513a71d6e42b78a86621baed8b2ec5488ef9ba24d30dfcb905bc8f7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.439737 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.463090 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8dc35dac-41a2-4bc1-ad26-5f515126921e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://491f95c8936085131d8f242b89d15aa9d10fb347
8242014b214bc73aa9d78904\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5f9bae279a880991da00856f22e272f049c29f8f966db3a3455c7829b397e763\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:17Z\\\",\\\"message\\\":\\\"de-6hbdc openshift-dns/node-resolver-vck4f openshift-machine-config-operator/machine-config-daemon-5tqlx openshift-multus/multus-additional-cni-plugins-zqf8t openshift-multus/multus-s4g62 openshift-multus/network-metrics-daemon-rz9mm openshift-network-console/networking-console-plugin-85b44fc459-gdk6g openshift-network-diagnostics/network-check-target-xd92c openshift-etcd/etcd-crc openshift-kube-controller-manager/kube-controller-manager-crc openshift-network-operator/iptables-alerter-4ln5h openshift-network-operator/network-operator-58b4c7f79c-55gtf]\\\\nI1210 15:21:17.364164 6179 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1210 15:21:17.364170 6179 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1210 15:21:17.364185 6179 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI1210 15:21:17.364200 6179 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nF1210 15:21:17.364246 6179 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handle\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:45Z\\\",\\\"message\\\":\\\"1210 15:21:44.932508 6549 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 15:21:44.932752 6549 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 15:21:44.932792 6549 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1210 15:21:44.934936 6549 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1210 15:21:44.935627 6549 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1210 15:21:44.935644 6549 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1210 15:21:44.935658 6549 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1210 15:21:44.935663 6549 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1210 15:21:44.935677 6549 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1210 15:21:44.935692 6549 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1210 15:21:44.935728 6549 factory.go:656] Stopping watch 
factory\\\\nI1210 15:21:44.935742 6549 ovnkube.go:599] Stopped ovnkube\\\\nI1210 15:21:44.935761 6549 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:21:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-88p9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-6hbdc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.478443 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ce60e1d4-6433-477d-89be-6ff9354dd0a4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b52516c1affbe79179abb977fb491f72267f1bdf347d7f118a54d56bf6637e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s7cvw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-5tqlx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.491897 4669 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d099499-bf36-4f4f-a556-47f9351394d3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:59Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://08af83a92418cbd54ba43caafd1ba6002d9a66ff2ecd7357b60b0ae081f2a3f1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52051e49579cb6cd32194e41e54277e26cf797c9afbf9d801be9fcf77b67d01b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2c
c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://511c999208ebf18a5f5a84471337f86b47ee405ac4ec17ea12a2889a4db33f29\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://644d2617c44b28476e3e30181d33864910159eabad7f5c12da26218f65cffe4f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://903bccd22e1b11c09272bdc8b06756cc9633f6b3e6e8399f65d2cb060cdb4244\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:54Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-re
lease\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0010356614ce4ac7ea3a1635411c90223de1e5f9df567da49290845f817c7e25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3942296d6aec9811cb092cde2d56ce4d0670f91f81ea5ff00dbaa122e07e6d58\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhmnr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-zqf8t\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.502670 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0cf80d68-31ee-4750-a96a-3b66b4ce7c07\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5e6d75bf5ff884fec59058a141854a7f9222a8337f86b56098851e30a0f4d6d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5248cf34928a68839c3f4da115b0009ef1b4f6fc313018a6f60344c2ee8f5ef\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88908b68f1b6c8e8ee6cf65e57077640c6cf07c6f975a1b5c4d6e5b11c7602e1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a1551f9bba04d5ec7d121d96850a0d14e06d066bc8f399fe1d93ac488b260b04\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.513330 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.513365 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.513373 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.513392 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.513404 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:46Z","lastTransitionTime":"2025-12-10T15:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.519549 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"58930104-b0aa-4955-93ac-98fa4a576fef\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:29Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.536414 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f7fe6b1f-e80e-4ef6-986d-50f02c8eae71\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1f66cc2c96ae0fba5c03a33696b6b1bcc440fd4ff5827bbf5b9e7e578c7b19b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://77215e95326e9c763b5f4a4b861715261969c719891a42db657c54156c765455\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://28e7aedf2b7d458c46e9e56151983cefcc2c09e6b65cb694e475de7c0f7106c9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.560281 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfa4f705eabc591b9b81459b34a6e847c88aa5090211a7050b5d56434f36f749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.571968 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-h76v4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"322826d8-4f6d-4ef8-b724-7d7a76490356\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://40aa491f14ed0582bd96f3b146b4320fff73847a10fac893e123e2f2a1865971\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vfzjg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-h76v4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.580768 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82f00eda-0389-4ff1-ae1c-ce2790df3a44\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:01Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1810acb16a61d2365f8cad7fa2a1857a80efb986ad999c152388c277bbd34219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd3835853caca06de20277e64c8a84076b1123922a1e0d295d06a23146548cdc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wmn42\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:01Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-jq6vl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 
15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.589779 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:02Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4pvdj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:21:02Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rz9mm\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.601530 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.615701 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.615742 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.615760 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.615781 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.615795 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:46Z","lastTransitionTime":"2025-12-10T15:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.618304 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:44Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.638561 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://57e5ea56a787dd06ccac0abb375b18ef88cf91ba55f066a8eeb39f4a7400d3b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1ecaf10ddae5a73c47bbc6a33b604cd51e89d7c6b62825b0cf39025a965d7d9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.652620 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bf08ba6d7de9dd7d7e1a647f87aadfb267a956570dc4bdf2032c79de8aa99d38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.663127 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-vck4f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"11cdb984-c1c3-4762-a527-8f0243733219\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f3d8a602d7cc773c6523581bff3fc1488ed10fd3681376cd393dd9dae55f2581\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tbxbd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:46Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-vck4f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.675618 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9221c776-2bb7-4936-8760-7c99211087aa\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ed05214809a23e10476b1d4cff9928185e996ae5fbf9b87237f9b2cee3afb1d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:20:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aaadf7f6f9959febbd64fa277d29a62df986ec72bb1747ba8d0d0b670b21bb70\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-10T15:20:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.690616 4669 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-s4g62" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3dda8be1-e5bc-42a3-820e-4285b75bf8c2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:20:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-10T15:21:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-10T15:21:37Z\\\",\\\"message\\\":\\\"2025-12-10T15:20:52+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9\\\\n2025-12-10T15:20:52+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4e2d3344-eabd-4eeb-8a83-ede1bc3b86e9 to /host/opt/cni/bin/\\\\n2025-12-10T15:20:52Z [verbose] multus-daemon started\\\\n2025-12-10T15:20:52Z [verbose] Readiness Indicator file check\\\\n2025-12-10T15:21:37Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-10T15:20:48Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-10T15:21:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dr8hl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-10T15:20:47Z\\\"}}\" for pod \"openshift-multus\"/\"multus-s4g62\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-10T15:21:46Z is after 2025-08-24T17:21:41Z" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.718493 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.718539 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.718551 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.718568 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.718580 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:46Z","lastTransitionTime":"2025-12-10T15:21:46Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.821951 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.822393 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.822412 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.822437 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.822457 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:46Z","lastTransitionTime":"2025-12-10T15:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.924534 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.924590 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.924601 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.924616 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.924626 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:46Z","lastTransitionTime":"2025-12-10T15:21:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:46 crc kubenswrapper[4669]: I1210 15:21:46.993743 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/3.log" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.026318 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.026616 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.026626 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.026640 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.026650 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.129305 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.129354 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.129368 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.129385 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.129397 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.231806 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.231845 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.231854 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.231867 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.231877 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.335575 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.335622 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.335633 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.335651 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.335664 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.437539 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.437585 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.437603 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.437619 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.437630 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.540802 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.540859 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.540881 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.540908 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.540931 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.644731 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.644768 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.644780 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.644797 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.644808 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.747984 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.748023 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.748034 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.748048 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.748060 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.852295 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.852358 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.852377 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.852405 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.852425 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.955233 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.955271 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.955279 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.955294 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:47 crc kubenswrapper[4669]: I1210 15:21:47.955327 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:47Z","lastTransitionTime":"2025-12-10T15:21:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.059898 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.060281 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.060386 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.060480 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.060574 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.163350 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.163397 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.163407 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.163424 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.163435 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.267314 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.267710 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.267882 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.268026 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.268152 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.370892 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.371371 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.371575 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.371805 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.371976 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.397464 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.397545 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.397544 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:48 crc kubenswrapper[4669]: E1210 15:21:48.397674 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:48 crc kubenswrapper[4669]: E1210 15:21:48.397878 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.397903 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:48 crc kubenswrapper[4669]: E1210 15:21:48.398041 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:48 crc kubenswrapper[4669]: E1210 15:21:48.398168 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.474913 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.475185 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.475303 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.475403 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.475490 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.578590 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.578943 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.579195 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.579476 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.579738 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.682645 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.682719 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.682733 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.682752 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.682765 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.785697 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.785757 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.785779 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.785805 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.785826 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.889303 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.889376 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.889404 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.889435 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.889458 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.991664 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.991695 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.991703 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.991718 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:48 crc kubenswrapper[4669]: I1210 15:21:48.991728 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:48Z","lastTransitionTime":"2025-12-10T15:21:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.093739 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.093786 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.093794 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.093811 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.093819 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:49Z","lastTransitionTime":"2025-12-10T15:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.196904 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.197008 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.197028 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.197048 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.197059 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:49Z","lastTransitionTime":"2025-12-10T15:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.300291 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.300328 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.300337 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.300352 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.300361 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:49Z","lastTransitionTime":"2025-12-10T15:21:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.303780 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.303952 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.304066 4669 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.304105 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.304092221 +0000 UTC m=+147.221038848 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.304292 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.304284577 +0000 UTC m=+147.221231204 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.404891 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.404945 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:49 crc kubenswrapper[4669]: I1210 15:21:49.404965 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405079 4669 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405119 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405134 4669 projected.go:288] Couldn't get configMap
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405145 4669 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405089 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405191 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.405178125 +0000 UTC m=+147.322124742 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405207 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.405201015 +0000 UTC m=+147.322147642 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405231 4669 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405249 4669 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:49 crc kubenswrapper[4669]: E1210 15:21:49.405317 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.405288247 +0000 UTC m=+147.322234944 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.397945 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.398007 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:50 crc kubenswrapper[4669]: E1210 15:21:50.398202 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.398258 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:50 crc kubenswrapper[4669]: E1210 15:21:50.398396 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:50 crc kubenswrapper[4669]: E1210 15:21:50.398446 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.398741 4669 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:50 crc kubenswrapper[4669]: E1210 15:21:50.399104 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.438473 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.438654 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.438688 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.438720 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.438744 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:50Z","lastTransitionTime":"2025-12-10T15:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.542158 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.542274 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.542302 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.542332 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.542356 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:50Z","lastTransitionTime":"2025-12-10T15:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.645518 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.645856 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.646026 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.646254 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.646451 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:50Z","lastTransitionTime":"2025-12-10T15:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.750113 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.750182 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.750205 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.750281 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.750304 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:50Z","lastTransitionTime":"2025-12-10T15:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.853433 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.853536 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.853556 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.853582 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.853600 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:50Z","lastTransitionTime":"2025-12-10T15:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.956695 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.956766 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.956782 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.956830 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:50 crc kubenswrapper[4669]: I1210 15:21:50.956845 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:50Z","lastTransitionTime":"2025-12-10T15:21:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.018704 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.018763 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.018775 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.018792 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.018802 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:51Z","lastTransitionTime":"2025-12-10T15:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.107401 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.107744 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.107857 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.107964 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.108070 4669 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-10T15:21:51Z","lastTransitionTime":"2025-12-10T15:21:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.159594 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2"] Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.160320 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.172314 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.173377 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.173489 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.174203 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.206602 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-h76v4" podStartSLOduration=65.20657922 podStartE2EDuration="1m5.20657922s" podCreationTimestamp="2025-12-10 15:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.189413251 +0000 UTC m=+85.106359878" watchObservedRunningTime="2025-12-10 15:21:51.20657922 +0000 UTC m=+85.123525847" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.218909 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-jq6vl" podStartSLOduration=63.218885554 podStartE2EDuration="1m3.218885554s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.207066292 +0000 UTC m=+85.124012919" watchObservedRunningTime="2025-12-10 15:21:51.218885554 +0000 UTC m=+85.135832181" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.235631 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=29.235614694 podStartE2EDuration="29.235614694s" podCreationTimestamp="2025-12-10 15:21:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.235561432 +0000 UTC m=+85.152508059" watchObservedRunningTime="2025-12-10 15:21:51.235614694 +0000 UTC m=+85.152561341" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.267134 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=67.267116915 podStartE2EDuration="1m7.267116915s" podCreationTimestamp="2025-12-10 15:20:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.252590789 +0000 UTC m=+85.169537416" watchObservedRunningTime="2025-12-10 15:21:51.267116915 +0000 UTC m=+85.184063542" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 
15:21:51.285511 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=68.285488875 podStartE2EDuration="1m8.285488875s" podCreationTimestamp="2025-12-10 15:20:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.267596407 +0000 UTC m=+85.184543034" watchObservedRunningTime="2025-12-10 15:21:51.285488875 +0000 UTC m=+85.202435502" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.313897 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-vck4f" podStartSLOduration=65.313878392 podStartE2EDuration="1m5.313878392s" podCreationTimestamp="2025-12-10 15:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.297752406 +0000 UTC m=+85.214699033" watchObservedRunningTime="2025-12-10 15:21:51.313878392 +0000 UTC m=+85.230825019" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.325569 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbc8a705-99d0-4496-9632-036cbbf62221-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.325609 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cbc8a705-99d0-4496-9632-036cbbf62221-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.325628 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/cbc8a705-99d0-4496-9632-036cbbf62221-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.325645 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cbc8a705-99d0-4496-9632-036cbbf62221-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.325708 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/cbc8a705-99d0-4496-9632-036cbbf62221-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.375800 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=17.375785489 podStartE2EDuration="17.375785489s" podCreationTimestamp="2025-12-10 15:21:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.37500348 +0000 UTC m=+85.291950097" watchObservedRunningTime="2025-12-10 15:21:51.375785489 +0000 UTC m=+85.292732116" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.399884 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-s4g62" podStartSLOduration=64.399865424 podStartE2EDuration="1m4.399865424s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.389667421 +0000 UTC m=+85.306614058" watchObservedRunningTime="2025-12-10 15:21:51.399865424 +0000 UTC m=+85.316812051" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.426692 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/cbc8a705-99d0-4496-9632-036cbbf62221-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.426971 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbc8a705-99d0-4496-9632-036cbbf62221-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.427057 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cbc8a705-99d0-4496-9632-036cbbf62221-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.427156 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/cbc8a705-99d0-4496-9632-036cbbf62221-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.427274 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cbc8a705-99d0-4496-9632-036cbbf62221-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.427340 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/cbc8a705-99d0-4496-9632-036cbbf62221-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.426851 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/cbc8a705-99d0-4496-9632-036cbbf62221-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.428106 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cbc8a705-99d0-4496-9632-036cbbf62221-service-ca\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.432803 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cbc8a705-99d0-4496-9632-036cbbf62221-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.442076 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-zqf8t" podStartSLOduration=64.442053301 podStartE2EDuration="1m4.442053301s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.416141833 +0000 UTC m=+85.333088460" watchObservedRunningTime="2025-12-10 15:21:51.442053301 +0000 UTC m=+85.358999928" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.442390 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=66.442386619 podStartE2EDuration="1m6.442386619s" podCreationTimestamp="2025-12-10 15:20:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.439873259 +0000 UTC m=+85.356819906" watchObservedRunningTime="2025-12-10 15:21:51.442386619 +0000 UTC m=+85.359333246" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.446908 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cbc8a705-99d0-4496-9632-036cbbf62221-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-xwzd2\" (UID: \"cbc8a705-99d0-4496-9632-036cbbf62221\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.471951 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" Dec 10 15:21:51 crc kubenswrapper[4669]: I1210 15:21:51.508113 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podStartSLOduration=64.508097818 podStartE2EDuration="1m4.508097818s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:51.507454062 +0000 UTC m=+85.424400689" watchObservedRunningTime="2025-12-10 15:21:51.508097818 +0000 UTC m=+85.425044445" Dec 10 15:21:52 crc kubenswrapper[4669]: I1210 15:21:52.014428 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" event={"ID":"cbc8a705-99d0-4496-9632-036cbbf62221","Type":"ContainerStarted","Data":"24668cc31411ac7f8fc1330b9de5753e1db7d5698170c9418e0c94ffae56ef14"} Dec 10 15:21:52 crc kubenswrapper[4669]: I1210 15:21:52.014490 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" event={"ID":"cbc8a705-99d0-4496-9632-036cbbf62221","Type":"ContainerStarted","Data":"437ce1d3e4d478aaf15e1bf5cb4821e2f3ad9a6513385bba66c8cadb3a90c120"} Dec 10 15:21:52 crc kubenswrapper[4669]: I1210 15:21:52.032871 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-xwzd2" podStartSLOduration=65.032848912 podStartE2EDuration="1m5.032848912s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:21:52.031724755 +0000 UTC m=+85.948671392" watchObservedRunningTime="2025-12-10 15:21:52.032848912 +0000 UTC m=+85.949795549" Dec 10 15:21:52 crc kubenswrapper[4669]: I1210 15:21:52.397695 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:52 crc kubenswrapper[4669]: I1210 15:21:52.397710 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:52 crc kubenswrapper[4669]: E1210 15:21:52.398570 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:52 crc kubenswrapper[4669]: I1210 15:21:52.397747 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:52 crc kubenswrapper[4669]: E1210 15:21:52.398623 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:52 crc kubenswrapper[4669]: I1210 15:21:52.397726 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:52 crc kubenswrapper[4669]: E1210 15:21:52.398969 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:52 crc kubenswrapper[4669]: E1210 15:21:52.398870 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:54 crc kubenswrapper[4669]: I1210 15:21:54.397095 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:54 crc kubenswrapper[4669]: I1210 15:21:54.397139 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:54 crc kubenswrapper[4669]: I1210 15:21:54.397199 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:54 crc kubenswrapper[4669]: E1210 15:21:54.397310 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:54 crc kubenswrapper[4669]: I1210 15:21:54.397346 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:54 crc kubenswrapper[4669]: E1210 15:21:54.397439 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:54 crc kubenswrapper[4669]: E1210 15:21:54.397509 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:54 crc kubenswrapper[4669]: E1210 15:21:54.397566 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:56 crc kubenswrapper[4669]: I1210 15:21:56.397447 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:56 crc kubenswrapper[4669]: I1210 15:21:56.397449 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:56 crc kubenswrapper[4669]: I1210 15:21:56.397516 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:56 crc kubenswrapper[4669]: I1210 15:21:56.397567 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:56 crc kubenswrapper[4669]: E1210 15:21:56.399251 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:56 crc kubenswrapper[4669]: E1210 15:21:56.399546 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:21:56 crc kubenswrapper[4669]: E1210 15:21:56.399631 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:56 crc kubenswrapper[4669]: E1210 15:21:56.399821 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:58 crc kubenswrapper[4669]: I1210 15:21:58.397827 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:21:58 crc kubenswrapper[4669]: I1210 15:21:58.397822 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:21:58 crc kubenswrapper[4669]: I1210 15:21:58.397862 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:21:58 crc kubenswrapper[4669]: I1210 15:21:58.398043 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:21:58 crc kubenswrapper[4669]: E1210 15:21:58.398310 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:21:58 crc kubenswrapper[4669]: E1210 15:21:58.398465 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:21:58 crc kubenswrapper[4669]: E1210 15:21:58.398640 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:21:58 crc kubenswrapper[4669]: E1210 15:21:58.398769 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:00 crc kubenswrapper[4669]: I1210 15:22:00.398014 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:00 crc kubenswrapper[4669]: I1210 15:22:00.398173 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:00 crc kubenswrapper[4669]: E1210 15:22:00.398746 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:00 crc kubenswrapper[4669]: I1210 15:22:00.398285 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:00 crc kubenswrapper[4669]: I1210 15:22:00.398173 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:00 crc kubenswrapper[4669]: I1210 15:22:00.399397 4669 scope.go:117] "RemoveContainer" containerID="491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904" Dec 10 15:22:00 crc kubenswrapper[4669]: E1210 15:22:00.399671 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" Dec 10 15:22:00 crc kubenswrapper[4669]: E1210 15:22:00.400354 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:00 crc kubenswrapper[4669]: E1210 15:22:00.400741 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:00 crc kubenswrapper[4669]: E1210 15:22:00.401046 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:02 crc kubenswrapper[4669]: I1210 15:22:02.398178 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:02 crc kubenswrapper[4669]: I1210 15:22:02.398258 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:02 crc kubenswrapper[4669]: I1210 15:22:02.398307 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:02 crc kubenswrapper[4669]: E1210 15:22:02.398402 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:02 crc kubenswrapper[4669]: I1210 15:22:02.398464 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:02 crc kubenswrapper[4669]: E1210 15:22:02.398577 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:02 crc kubenswrapper[4669]: E1210 15:22:02.398685 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:02 crc kubenswrapper[4669]: E1210 15:22:02.398985 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:04 crc kubenswrapper[4669]: I1210 15:22:04.397556 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:04 crc kubenswrapper[4669]: I1210 15:22:04.397560 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:04 crc kubenswrapper[4669]: E1210 15:22:04.398314 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:04 crc kubenswrapper[4669]: I1210 15:22:04.397671 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:04 crc kubenswrapper[4669]: E1210 15:22:04.398406 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:04 crc kubenswrapper[4669]: I1210 15:22:04.397672 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:04 crc kubenswrapper[4669]: E1210 15:22:04.398460 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:04 crc kubenswrapper[4669]: E1210 15:22:04.398544 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:06 crc kubenswrapper[4669]: I1210 15:22:06.397213 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:06 crc kubenswrapper[4669]: E1210 15:22:06.399006 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:06 crc kubenswrapper[4669]: I1210 15:22:06.399063 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:06 crc kubenswrapper[4669]: I1210 15:22:06.399092 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:06 crc kubenswrapper[4669]: E1210 15:22:06.399285 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:06 crc kubenswrapper[4669]: E1210 15:22:06.399365 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:06 crc kubenswrapper[4669]: I1210 15:22:06.399104 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:06 crc kubenswrapper[4669]: E1210 15:22:06.399628 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:06 crc kubenswrapper[4669]: I1210 15:22:06.715363 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:06 crc kubenswrapper[4669]: E1210 15:22:06.715529 4669 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:22:06 crc kubenswrapper[4669]: E1210 15:22:06.715614 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs podName:a72cfbf6-e882-4e1e-8809-b6735aae5dfe nodeName:}" failed. No retries permitted until 2025-12-10 15:23:10.715589613 +0000 UTC m=+164.632536260 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs") pod "network-metrics-daemon-rz9mm" (UID: "a72cfbf6-e882-4e1e-8809-b6735aae5dfe") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 10 15:22:08 crc kubenswrapper[4669]: I1210 15:22:08.397803 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:08 crc kubenswrapper[4669]: I1210 15:22:08.397921 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:08 crc kubenswrapper[4669]: I1210 15:22:08.397851 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:08 crc kubenswrapper[4669]: I1210 15:22:08.397847 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:08 crc kubenswrapper[4669]: E1210 15:22:08.397992 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:08 crc kubenswrapper[4669]: E1210 15:22:08.398100 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:08 crc kubenswrapper[4669]: E1210 15:22:08.398683 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:08 crc kubenswrapper[4669]: E1210 15:22:08.398933 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:10 crc kubenswrapper[4669]: I1210 15:22:10.397689 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:10 crc kubenswrapper[4669]: I1210 15:22:10.397740 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:10 crc kubenswrapper[4669]: I1210 15:22:10.397840 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:10 crc kubenswrapper[4669]: E1210 15:22:10.397916 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:10 crc kubenswrapper[4669]: I1210 15:22:10.398315 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:10 crc kubenswrapper[4669]: E1210 15:22:10.398362 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:10 crc kubenswrapper[4669]: E1210 15:22:10.398524 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:10 crc kubenswrapper[4669]: E1210 15:22:10.398170 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:12 crc kubenswrapper[4669]: I1210 15:22:12.398071 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:12 crc kubenswrapper[4669]: I1210 15:22:12.398323 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:12 crc kubenswrapper[4669]: E1210 15:22:12.398457 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:12 crc kubenswrapper[4669]: I1210 15:22:12.398538 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:12 crc kubenswrapper[4669]: I1210 15:22:12.398610 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:12 crc kubenswrapper[4669]: E1210 15:22:12.398693 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:12 crc kubenswrapper[4669]: E1210 15:22:12.398887 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:12 crc kubenswrapper[4669]: E1210 15:22:12.399025 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:13 crc kubenswrapper[4669]: I1210 15:22:13.398644 4669 scope.go:117] "RemoveContainer" containerID="491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904" Dec 10 15:22:13 crc kubenswrapper[4669]: E1210 15:22:13.398869 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-6hbdc_openshift-ovn-kubernetes(8dc35dac-41a2-4bc1-ad26-5f515126921e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" Dec 10 15:22:14 crc kubenswrapper[4669]: I1210 15:22:14.397532 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:14 crc kubenswrapper[4669]: I1210 15:22:14.397562 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:14 crc kubenswrapper[4669]: E1210 15:22:14.397707 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:14 crc kubenswrapper[4669]: I1210 15:22:14.397773 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:14 crc kubenswrapper[4669]: E1210 15:22:14.398013 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:14 crc kubenswrapper[4669]: I1210 15:22:14.398406 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:14 crc kubenswrapper[4669]: E1210 15:22:14.398850 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:14 crc kubenswrapper[4669]: E1210 15:22:14.398969 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:16 crc kubenswrapper[4669]: I1210 15:22:16.397371 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:16 crc kubenswrapper[4669]: I1210 15:22:16.397435 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:16 crc kubenswrapper[4669]: I1210 15:22:16.397482 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:16 crc kubenswrapper[4669]: E1210 15:22:16.398311 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:16 crc kubenswrapper[4669]: I1210 15:22:16.398346 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:16 crc kubenswrapper[4669]: E1210 15:22:16.398436 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:16 crc kubenswrapper[4669]: E1210 15:22:16.398597 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:16 crc kubenswrapper[4669]: E1210 15:22:16.398674 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:18 crc kubenswrapper[4669]: I1210 15:22:18.397036 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:18 crc kubenswrapper[4669]: E1210 15:22:18.397763 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:18 crc kubenswrapper[4669]: I1210 15:22:18.397133 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:18 crc kubenswrapper[4669]: E1210 15:22:18.397999 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:18 crc kubenswrapper[4669]: I1210 15:22:18.397108 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:18 crc kubenswrapper[4669]: I1210 15:22:18.397145 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:18 crc kubenswrapper[4669]: E1210 15:22:18.398237 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:18 crc kubenswrapper[4669]: E1210 15:22:18.398456 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:20 crc kubenswrapper[4669]: I1210 15:22:20.397927 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:20 crc kubenswrapper[4669]: I1210 15:22:20.397991 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:20 crc kubenswrapper[4669]: E1210 15:22:20.398136 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:20 crc kubenswrapper[4669]: I1210 15:22:20.398183 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:20 crc kubenswrapper[4669]: I1210 15:22:20.398269 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:20 crc kubenswrapper[4669]: E1210 15:22:20.398373 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:20 crc kubenswrapper[4669]: E1210 15:22:20.398523 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:20 crc kubenswrapper[4669]: E1210 15:22:20.398656 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:22 crc kubenswrapper[4669]: I1210 15:22:22.397609 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:22 crc kubenswrapper[4669]: I1210 15:22:22.397705 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:22 crc kubenswrapper[4669]: I1210 15:22:22.397715 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:22 crc kubenswrapper[4669]: E1210 15:22:22.397838 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:22 crc kubenswrapper[4669]: I1210 15:22:22.397885 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:22 crc kubenswrapper[4669]: E1210 15:22:22.397968 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:22 crc kubenswrapper[4669]: E1210 15:22:22.398143 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:22 crc kubenswrapper[4669]: E1210 15:22:22.398333 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:24 crc kubenswrapper[4669]: I1210 15:22:24.397287 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:24 crc kubenswrapper[4669]: I1210 15:22:24.397311 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:24 crc kubenswrapper[4669]: I1210 15:22:24.397363 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:24 crc kubenswrapper[4669]: E1210 15:22:24.397409 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 10 15:22:24 crc kubenswrapper[4669]: I1210 15:22:24.397430 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:24 crc kubenswrapper[4669]: E1210 15:22:24.397506 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 10 15:22:24 crc kubenswrapper[4669]: E1210 15:22:24.397572 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe" Dec 10 15:22:24 crc kubenswrapper[4669]: E1210 15:22:24.397771 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 10 15:22:25 crc kubenswrapper[4669]: I1210 15:22:25.131303 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/1.log" Dec 10 15:22:25 crc kubenswrapper[4669]: I1210 15:22:25.132673 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/0.log" Dec 10 15:22:25 crc kubenswrapper[4669]: I1210 15:22:25.132755 4669 generic.go:334] "Generic (PLEG): container finished" podID="3dda8be1-e5bc-42a3-820e-4285b75bf8c2" containerID="235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778" exitCode=1 Dec 10 15:22:25 crc kubenswrapper[4669]: I1210 15:22:25.132804 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s4g62" event={"ID":"3dda8be1-e5bc-42a3-820e-4285b75bf8c2","Type":"ContainerDied","Data":"235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778"} Dec 10 15:22:25 crc kubenswrapper[4669]: I1210 15:22:25.132855 4669 scope.go:117] "RemoveContainer" containerID="96dbb329f6257b316430f20a0b28e6cf13b163293c1c224514803d5f2c6df72c" Dec 10 15:22:25 crc kubenswrapper[4669]: I1210 15:22:25.133412 4669 scope.go:117] "RemoveContainer" containerID="235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778" Dec 10 15:22:25 crc kubenswrapper[4669]: E1210 15:22:25.133670 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-s4g62_openshift-multus(3dda8be1-e5bc-42a3-820e-4285b75bf8c2)\"" pod="openshift-multus/multus-s4g62" podUID="3dda8be1-e5bc-42a3-820e-4285b75bf8c2" Dec 10 15:22:26 crc kubenswrapper[4669]: I1210 15:22:26.138184 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/1.log" Dec 10 15:22:26 crc kubenswrapper[4669]: E1210 15:22:26.343680 4669 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 10 15:22:26 crc kubenswrapper[4669]: I1210 15:22:26.397947 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:26 crc kubenswrapper[4669]: I1210 15:22:26.397947 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:26 crc kubenswrapper[4669]: I1210 15:22:26.397961 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:26 crc kubenswrapper[4669]: I1210 15:22:26.398066 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:26 crc kubenswrapper[4669]: E1210 15:22:26.399092 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:22:26 crc kubenswrapper[4669]: E1210 15:22:26.399174 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:22:26 crc kubenswrapper[4669]: E1210 15:22:26.399232 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:22:26 crc kubenswrapper[4669]: E1210 15:22:26.399269 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:22:26 crc kubenswrapper[4669]: E1210 15:22:26.603315 4669 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 10 15:22:28 crc kubenswrapper[4669]: I1210 15:22:28.398399 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:28 crc kubenswrapper[4669]: I1210 15:22:28.398475 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:28 crc kubenswrapper[4669]: I1210 15:22:28.398488 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:22:28 crc kubenswrapper[4669]: E1210 15:22:28.398585 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:22:28 crc kubenswrapper[4669]: I1210 15:22:28.398740 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:28 crc kubenswrapper[4669]: E1210 15:22:28.398764 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:22:28 crc kubenswrapper[4669]: E1210 15:22:28.398940 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:22:28 crc kubenswrapper[4669]: E1210 15:22:28.399070 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:22:28 crc kubenswrapper[4669]: I1210 15:22:28.399852 4669 scope.go:117] "RemoveContainer" containerID="491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904"
Dec 10 15:22:29 crc kubenswrapper[4669]: I1210 15:22:29.151988 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/3.log"
Dec 10 15:22:29 crc kubenswrapper[4669]: I1210 15:22:29.153651 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerStarted","Data":"b3770a9b96f7ba48f28990dbe8f32b4fa6f0e48d4ed8ea413dbb43316bd7c79b"}
Dec 10 15:22:29 crc kubenswrapper[4669]: I1210 15:22:29.154980 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc"
Dec 10 15:22:29 crc kubenswrapper[4669]: I1210 15:22:29.208856 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podStartSLOduration=102.208838415 podStartE2EDuration="1m42.208838415s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:29.208419955 +0000 UTC m=+123.125366612" watchObservedRunningTime="2025-12-10 15:22:29.208838415 +0000 UTC m=+123.125785042"
Dec 10 15:22:29 crc kubenswrapper[4669]: I1210 15:22:29.995691 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rz9mm"]
Dec 10 15:22:29 crc kubenswrapper[4669]: I1210 15:22:29.995989 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:22:29 crc kubenswrapper[4669]: E1210 15:22:29.996265 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:22:30 crc kubenswrapper[4669]: I1210 15:22:30.397096 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:30 crc kubenswrapper[4669]: I1210 15:22:30.397118 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:30 crc kubenswrapper[4669]: E1210 15:22:30.397595 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:22:30 crc kubenswrapper[4669]: E1210 15:22:30.397690 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:22:30 crc kubenswrapper[4669]: I1210 15:22:30.397947 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:30 crc kubenswrapper[4669]: E1210 15:22:30.398030 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:22:31 crc kubenswrapper[4669]: I1210 15:22:31.398130 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:22:31 crc kubenswrapper[4669]: E1210 15:22:31.398490 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:22:31 crc kubenswrapper[4669]: E1210 15:22:31.604319 4669 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 10 15:22:32 crc kubenswrapper[4669]: I1210 15:22:32.397861 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:32 crc kubenswrapper[4669]: I1210 15:22:32.397923 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:32 crc kubenswrapper[4669]: E1210 15:22:32.398109 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:22:32 crc kubenswrapper[4669]: I1210 15:22:32.398151 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:32 crc kubenswrapper[4669]: E1210 15:22:32.398390 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:22:32 crc kubenswrapper[4669]: E1210 15:22:32.398540 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:22:33 crc kubenswrapper[4669]: I1210 15:22:33.397462 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:22:33 crc kubenswrapper[4669]: E1210 15:22:33.397679 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:22:34 crc kubenswrapper[4669]: I1210 15:22:34.398161 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:34 crc kubenswrapper[4669]: E1210 15:22:34.399741 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:22:34 crc kubenswrapper[4669]: I1210 15:22:34.398473 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:34 crc kubenswrapper[4669]: E1210 15:22:34.400348 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:22:34 crc kubenswrapper[4669]: I1210 15:22:34.398431 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:34 crc kubenswrapper[4669]: E1210 15:22:34.400956 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:22:35 crc kubenswrapper[4669]: I1210 15:22:35.397589 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:22:35 crc kubenswrapper[4669]: E1210 15:22:35.397818 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:22:36 crc kubenswrapper[4669]: I1210 15:22:36.397498 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:36 crc kubenswrapper[4669]: I1210 15:22:36.397531 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:36 crc kubenswrapper[4669]: E1210 15:22:36.398851 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:22:36 crc kubenswrapper[4669]: I1210 15:22:36.398866 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:36 crc kubenswrapper[4669]: E1210 15:22:36.399004 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:22:36 crc kubenswrapper[4669]: E1210 15:22:36.399150 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:22:36 crc kubenswrapper[4669]: E1210 15:22:36.605754 4669 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 10 15:22:37 crc kubenswrapper[4669]: I1210 15:22:37.397662 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:22:37 crc kubenswrapper[4669]: I1210 15:22:37.398267 4669 scope.go:117] "RemoveContainer" containerID="235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778"
Dec 10 15:22:37 crc kubenswrapper[4669]: E1210 15:22:37.398061 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:22:38 crc kubenswrapper[4669]: I1210 15:22:38.188126 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/1.log"
Dec 10 15:22:38 crc kubenswrapper[4669]: I1210 15:22:38.188517 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s4g62" event={"ID":"3dda8be1-e5bc-42a3-820e-4285b75bf8c2","Type":"ContainerStarted","Data":"203ebc8b9d6454da6c96a0e98afe28bf69e80814573f297ae868fdd3a15ee9da"}
Dec 10 15:22:38 crc kubenswrapper[4669]: I1210 15:22:38.398176 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:38 crc kubenswrapper[4669]: E1210 15:22:38.398418 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:22:38 crc kubenswrapper[4669]: I1210 15:22:38.398745 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:38 crc kubenswrapper[4669]: E1210 15:22:38.398841 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:22:38 crc kubenswrapper[4669]: I1210 15:22:38.399031 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:38 crc kubenswrapper[4669]: E1210 15:22:38.399115 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:22:39 crc kubenswrapper[4669]: I1210 15:22:39.397338 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:22:39 crc kubenswrapper[4669]: E1210 15:22:39.397688 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:22:40 crc kubenswrapper[4669]: I1210 15:22:40.397510 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:40 crc kubenswrapper[4669]: I1210 15:22:40.397569 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:40 crc kubenswrapper[4669]: E1210 15:22:40.397721 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 10 15:22:40 crc kubenswrapper[4669]: E1210 15:22:40.398011 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 10 15:22:40 crc kubenswrapper[4669]: I1210 15:22:40.398566 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:40 crc kubenswrapper[4669]: E1210 15:22:40.398701 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 10 15:22:41 crc kubenswrapper[4669]: I1210 15:22:41.397199 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm"
Dec 10 15:22:41 crc kubenswrapper[4669]: E1210 15:22:41.397705 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rz9mm" podUID="a72cfbf6-e882-4e1e-8809-b6735aae5dfe"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.355962 4669 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.422534 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.422540 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.422463 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.425927 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.425944 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.430253 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.436908 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.443747 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qhbj9"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.445821 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.455933 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.456531 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.458297 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.458561 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.458784 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.459608 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.464829 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.465990 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7j6zx"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.466457 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-wvzzm"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.466877 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.467131 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qhbj9"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.467330 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.467378 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.467650 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.467908 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.469636 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.471685 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.472931 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.473501 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.474101 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.474416 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.476569 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.478899 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.479633 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.480078 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.480145 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.480485 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.480771 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.481156 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.481253 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.487793 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.488177 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.497372 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.498469 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.499790 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.501430 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.501814 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.501890 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.502587 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-bl72p"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.502951 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-bl72p"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.517981 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.518495 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.518845 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.518881 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-t4zfg"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.519197 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.519356 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.519477 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.519613 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.519760 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.519872 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.519996 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.520143 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.520160 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.521064 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.521257 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.521324 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.521523 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.521537 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.521713 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.522334 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.522501 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-854cf"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.522751 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.523885 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.522748 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-t4zfg"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.529911 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.530080 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.530368 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.530373 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.530808 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.530989 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.531140 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.531323 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.531374 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.531492 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.531550 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.531663 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.540803 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.541606 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-95c68"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.542732 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.543783 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.546861 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.548665 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.549266 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.549444 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.549679 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.549975 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.550131 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.550382 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.551536 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.551698 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.553243 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.553468 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.553977 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.554366 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.554561 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.569663 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.571014 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.571952 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.576448 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.576833 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.576966 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.576583 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.577649 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.585482 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.586686 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.587004 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.587201 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.587996 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.588691 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.590469 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.590640 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.591198 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.596048 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.596386 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.596564 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.596761 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.596920 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.597163 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.597393 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.600336 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.600693 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-tscn9"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.600916 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.601319 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.601501 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.601710 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-tscn9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.603743 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.604421 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.608999 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609253 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609302 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609061 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609462 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609253 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609274 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609665 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609786 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609885 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-r92cx"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.609964 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.610187 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.610564 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.611128 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.612228 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.612757 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.612958 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.614991 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.615359 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.615661 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-dsw2s"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.616273 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dsw2s"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.617042 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.619021 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qbnt7"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.619567 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dzj9z"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.619880 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.620056 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.620279 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.620443 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dzj9z"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.622133 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6c97z"]
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.622714 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.623977 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3339e1ea-db38-49df-a24e-88b4252274d2-config\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.635751 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/439c2c2a-7b67-41a3-8544-8d2362d0db1b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.635853 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/3339e1ea-db38-49df-a24e-88b4252274d2-machine-approver-tls\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.635946 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/de92027c-22f4-4399-98e2-dd919dc0436d-node-bootstrap-token\") pod \"machine-config-server-bl72p\" (UID: \"de92027c-22f4-4399-98e2-dd919dc0436d\") " pod="openshift-machine-config-operator/machine-config-server-bl72p"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.636043 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/439c2c2a-7b67-41a3-8544-8d2362d0db1b-serving-cert\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.636286 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-client-ca\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.636396 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5csv\" (UniqueName: \"kubernetes.io/projected/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-kube-api-access-m5csv\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.636499 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/439c2c2a-7b67-41a3-8544-8d2362d0db1b-audit-policies\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.636590 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9b71401-32e6-4421-b992-155809accfe1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-fb9l5\" (UID: \"a9b71401-32e6-4421-b992-155809accfe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.628654 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.636824 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-image-import-ca\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.636939 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/372626a6-fd28-4cbb-93e5-e6520b30c3ce-serving-cert\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637026 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv69v\" (UniqueName: \"kubernetes.io/projected/f8b0b5c5-a647-4bae-867e-9745ae5ec534-kube-api-access-zv69v\") pod \"catalog-operator-68c6474976-4f5s9\" (UID: \"f8b0b5c5-a647-4bae-867e-9745ae5ec534\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637114 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2slg\" (UniqueName: \"kubernetes.io/projected/de92027c-22f4-4399-98e2-dd919dc0436d-kube-api-access-p2slg\") pod \"machine-config-server-bl72p\" (UID: \"de92027c-22f4-4399-98e2-dd919dc0436d\") " pod="openshift-machine-config-operator/machine-config-server-bl72p"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637371 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f8b0b5c5-a647-4bae-867e-9745ae5ec534-srv-cert\") pod \"catalog-operator-68c6474976-4f5s9\" (UID: \"f8b0b5c5-a647-4bae-867e-9745ae5ec534\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637524 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/372626a6-fd28-4cbb-93e5-e6520b30c3ce-node-pullsecrets\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637554 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-etcd-serving-ca\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637571 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/439c2c2a-7b67-41a3-8544-8d2362d0db1b-audit-dir\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637605 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdtrq\" (UniqueName: \"kubernetes.io/projected/439c2c2a-7b67-41a3-8544-8d2362d0db1b-kube-api-access-sdtrq\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637689 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-config\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637742 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637768 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/439c2c2a-7b67-41a3-8544-8d2362d0db1b-encryption-config\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637782 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9b71401-32e6-4421-b992-155809accfe1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-fb9l5\" (UID: \"a9b71401-32e6-4421-b992-155809accfe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637807 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d43ac716-9d6c-4acd-9d08-94bb006885d4-cert\") pod \"ingress-canary-t4zfg\" (UID: \"d43ac716-9d6c-4acd-9d08-94bb006885d4\") " pod="openshift-ingress-canary/ingress-canary-t4zfg"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637831 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bfw68\" (UniqueName: \"kubernetes.io/projected/372626a6-fd28-4cbb-93e5-e6520b30c3ce-kube-api-access-bfw68\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637857 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqwh9\" (UniqueName: \"kubernetes.io/projected/3339e1ea-db38-49df-a24e-88b4252274d2-kube-api-access-lqwh9\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637873 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-images\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637914 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3339e1ea-db38-49df-a24e-88b4252274d2-auth-proxy-config\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637935 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-config\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637953 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-audit\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637970 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sgc4h\" (UniqueName: \"kubernetes.io/projected/fdaed5ac-0cc4-49de-aa15-33cc2993afd3-kube-api-access-sgc4h\") pod \"cluster-samples-operator-665b6dd947-5kndl\" (UID: \"fdaed5ac-0cc4-49de-aa15-33cc2993afd3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.637993 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/372626a6-fd28-4cbb-93e5-e6520b30c3ce-audit-dir\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9"
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638008 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-client-ca\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx"
Dec 10 15:22:42 crc
kubenswrapper[4669]: I1210 15:22:42.638038 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-serving-cert\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638055 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tk92f\" (UniqueName: \"kubernetes.io/projected/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-kube-api-access-tk92f\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638070 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/439c2c2a-7b67-41a3-8544-8d2362d0db1b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638091 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-config\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638704 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/de92027c-22f4-4399-98e2-dd919dc0436d-certs\") pod \"machine-config-server-bl72p\" (UID: \"de92027c-22f4-4399-98e2-dd919dc0436d\") " pod="openshift-machine-config-operator/machine-config-server-bl72p" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638794 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/fdaed5ac-0cc4-49de-aa15-33cc2993afd3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-5kndl\" (UID: \"fdaed5ac-0cc4-49de-aa15-33cc2993afd3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638836 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr9v5\" (UniqueName: \"kubernetes.io/projected/8fad0a45-7f20-4957-b7b6-c4ebf59d799c-kube-api-access-jr9v5\") pod \"package-server-manager-789f6589d5-vxbnr\" (UID: \"8fad0a45-7f20-4957-b7b6-c4ebf59d799c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638877 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-config\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638902 4669 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khjvw\" (UniqueName: \"kubernetes.io/projected/a9b71401-32e6-4421-b992-155809accfe1-kube-api-access-khjvw\") pod \"openshift-apiserver-operator-796bbdcf4f-fb9l5\" (UID: \"a9b71401-32e6-4421-b992-155809accfe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.638954 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8fad0a45-7f20-4957-b7b6-c4ebf59d799c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vxbnr\" (UID: \"8fad0a45-7f20-4957-b7b6-c4ebf59d799c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.639007 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/439c2c2a-7b67-41a3-8544-8d2362d0db1b-etcd-client\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.639047 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14ff27a3-e946-4b7c-a56c-d7da016d86df-serving-cert\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.639077 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f8b0b5c5-a647-4bae-867e-9745ae5ec534-profile-collector-cert\") pod \"catalog-operator-68c6474976-4f5s9\" (UID: \"f8b0b5c5-a647-4bae-867e-9745ae5ec534\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.639129 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/372626a6-fd28-4cbb-93e5-e6520b30c3ce-encryption-config\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.639172 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/372626a6-fd28-4cbb-93e5-e6520b30c3ce-etcd-client\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.639196 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.639259 
4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gbkcv\" (UniqueName: \"kubernetes.io/projected/d43ac716-9d6c-4acd-9d08-94bb006885d4-kube-api-access-gbkcv\") pod \"ingress-canary-t4zfg\" (UID: \"d43ac716-9d6c-4acd-9d08-94bb006885d4\") " pod="openshift-ingress-canary/ingress-canary-t4zfg" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.639307 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.639339 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpqwc\" (UniqueName: \"kubernetes.io/projected/14ff27a3-e946-4b7c-a56c-d7da016d86df-kube-api-access-qpqwc\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.641756 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-wvzzm"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.647059 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.656131 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.656641 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.657599 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.657880 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.664484 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-b6x7l"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.665238 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.672644 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7j6zx"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.678350 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-bqznv"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.683957 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.690913 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.692123 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.692238 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.693401 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.698052 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.700265 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.703093 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rx9fw"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.705469 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.709258 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.709643 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.720702 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g997m"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.721642 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.722058 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n4dsm"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.723349 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.723628 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.726803 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742549 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr9v5\" (UniqueName: \"kubernetes.io/projected/8fad0a45-7f20-4957-b7b6-c4ebf59d799c-kube-api-access-jr9v5\") pod \"package-server-manager-789f6589d5-vxbnr\" (UID: \"8fad0a45-7f20-4957-b7b6-c4ebf59d799c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742596 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvhcz\" (UniqueName: \"kubernetes.io/projected/7d419e0a-917c-410c-820b-ddfab808a3fe-kube-api-access-vvhcz\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742622 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742643 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khjvw\" (UniqueName: \"kubernetes.io/projected/a9b71401-32e6-4421-b992-155809accfe1-kube-api-access-khjvw\") pod \"openshift-apiserver-operator-796bbdcf4f-fb9l5\" (UID: \"a9b71401-32e6-4421-b992-155809accfe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742661 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-service-ca-bundle\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742680 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsgfz\" (UniqueName: \"kubernetes.io/projected/99515d23-15fa-4bae-be9e-c0a2e2c46c89-kube-api-access-gsgfz\") pod \"openshift-controller-manager-operator-756b6f6bc6-k572g\" (UID: \"99515d23-15fa-4bae-be9e-c0a2e2c46c89\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742694 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742700 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8fad0a45-7f20-4957-b7b6-c4ebf59d799c-package-server-manager-serving-cert\") pod 
\"package-server-manager-789f6589d5-vxbnr\" (UID: \"8fad0a45-7f20-4957-b7b6-c4ebf59d799c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742765 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a88168e0-0728-4c47-8d89-5ece2fa293b9-secret-volume\") pod \"collect-profiles-29422995-wlslr\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742787 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j8dz\" (UniqueName: \"kubernetes.io/projected/d2501fbc-7568-4a6a-8200-465507ac4e49-kube-api-access-8j8dz\") pod \"kube-storage-version-migrator-operator-b67b599dd-brqbq\" (UID: \"d2501fbc-7568-4a6a-8200-465507ac4e49\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742819 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/439c2c2a-7b67-41a3-8544-8d2362d0db1b-etcd-client\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742840 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/15b4e0f9-29d2-4e88-8588-45d668e7f1ad-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9jrwx\" (UID: \"15b4e0f9-29d2-4e88-8588-45d668e7f1ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742859 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbst6\" (UniqueName: \"kubernetes.io/projected/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-kube-api-access-hbst6\") pod \"marketplace-operator-79b997595-6c97z\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742877 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742895 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f8b0b5c5-a647-4bae-867e-9745ae5ec534-profile-collector-cert\") pod \"catalog-operator-68c6474976-4f5s9\" (UID: \"f8b0b5c5-a647-4bae-867e-9745ae5ec534\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742914 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/372626a6-fd28-4cbb-93e5-e6520b30c3ce-encryption-config\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742929 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742944 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6p45s\" (UniqueName: \"kubernetes.io/projected/a88168e0-0728-4c47-8d89-5ece2fa293b9-kube-api-access-6p45s\") pod \"collect-profiles-29422995-wlslr\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742961 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e32e3dfe-0229-477b-8e6c-bd40314231ee-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zszbg\" (UID: \"e32e3dfe-0229-477b-8e6c-bd40314231ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742979 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-dir\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.742997 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743016 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2f7dm\" (UniqueName: \"kubernetes.io/projected/6b038ccf-005f-4c1b-b9ec-d9db407f528a-kube-api-access-2f7dm\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743047 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpqwc\" (UniqueName: \"kubernetes.io/projected/14ff27a3-e946-4b7c-a56c-d7da016d86df-kube-api-access-qpqwc\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743065 4669 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrd87\" (UniqueName: \"kubernetes.io/projected/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-kube-api-access-mrd87\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743079 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-policies\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743098 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3339e1ea-db38-49df-a24e-88b4252274d2-config\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743131 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/439c2c2a-7b67-41a3-8544-8d2362d0db1b-serving-cert\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743146 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-client-ca\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743163 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31533b6a-0dfa-4429-b7f9-097b52b009e6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743178 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/3339e1ea-db38-49df-a24e-88b4252274d2-machine-approver-tls\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743194 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/de92027c-22f4-4399-98e2-dd919dc0436d-node-bootstrap-token\") pod \"machine-config-server-bl72p\" (UID: \"de92027c-22f4-4399-98e2-dd919dc0436d\") " pod="openshift-machine-config-operator/machine-config-server-bl72p" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743210 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5csv\" (UniqueName: 
\"kubernetes.io/projected/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-kube-api-access-m5csv\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743240 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9b71401-32e6-4421-b992-155809accfe1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-fb9l5\" (UID: \"a9b71401-32e6-4421-b992-155809accfe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743256 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-image-import-ca\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743273 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv69v\" (UniqueName: \"kubernetes.io/projected/f8b0b5c5-a647-4bae-867e-9745ae5ec534-kube-api-access-zv69v\") pod \"catalog-operator-68c6474976-4f5s9\" (UID: \"f8b0b5c5-a647-4bae-867e-9745ae5ec534\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743291 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-oauth-config\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743307 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4eeead7e-1859-447b-a93d-f68fb80a119e-trusted-ca\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743334 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99515d23-15fa-4bae-be9e-c0a2e2c46c89-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-k572g\" (UID: \"99515d23-15fa-4bae-be9e-c0a2e2c46c89\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743350 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f8b0b5c5-a647-4bae-867e-9745ae5ec534-srv-cert\") pod \"catalog-operator-68c6474976-4f5s9\" (UID: \"f8b0b5c5-a647-4bae-867e-9745ae5ec534\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743372 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743395 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/439c2c2a-7b67-41a3-8544-8d2362d0db1b-audit-dir\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743411 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcvvc\" (UniqueName: \"kubernetes.io/projected/e32e3dfe-0229-477b-8e6c-bd40314231ee-kube-api-access-fcvvc\") pod \"control-plane-machine-set-operator-78cbb6b69f-zszbg\" (UID: \"e32e3dfe-0229-477b-8e6c-bd40314231ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743441 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/372626a6-fd28-4cbb-93e5-e6520b30c3ce-node-pullsecrets\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743463 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/31533b6a-0dfa-4429-b7f9-097b52b009e6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743482 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-config\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743500 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743521 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/439c2c2a-7b67-41a3-8544-8d2362d0db1b-encryption-config\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743540 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bef917d6-d516-41ca-ab40-e5d138f08a69-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-q7jp7\" (UID: 
\"bef917d6-d516-41ca-ab40-e5d138f08a69\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743556 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d43ac716-9d6c-4acd-9d08-94bb006885d4-cert\") pod \"ingress-canary-t4zfg\" (UID: \"d43ac716-9d6c-4acd-9d08-94bb006885d4\") " pod="openshift-ingress-canary/ingress-canary-t4zfg" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743575 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/31533b6a-0dfa-4429-b7f9-097b52b009e6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743591 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/17036398-eaeb-4ce2-9420-57cd9213ecbc-metrics-tls\") pod \"dns-operator-744455d44c-rx9fw\" (UID: \"17036398-eaeb-4ce2-9420-57cd9213ecbc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743610 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bfw68\" (UniqueName: \"kubernetes.io/projected/372626a6-fd28-4cbb-93e5-e6520b30c3ce-kube-api-access-bfw68\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743627 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqwh9\" (UniqueName: \"kubernetes.io/projected/3339e1ea-db38-49df-a24e-88b4252274d2-kube-api-access-lqwh9\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743649 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-config\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743672 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743692 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sgc4h\" (UniqueName: \"kubernetes.io/projected/fdaed5ac-0cc4-49de-aa15-33cc2993afd3-kube-api-access-sgc4h\") pod \"cluster-samples-operator-665b6dd947-5kndl\" (UID: \"fdaed5ac-0cc4-49de-aa15-33cc2993afd3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" 
Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743714 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-audit\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743731 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-client-ca\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743747 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-trusted-ca\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743768 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-trusted-ca-bundle\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743785 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/439c2c2a-7b67-41a3-8544-8d2362d0db1b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743806 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6b038ccf-005f-4c1b-b9ec-d9db407f528a-proxy-tls\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743827 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-serving-cert\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743848 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tk92f\" (UniqueName: \"kubernetes.io/projected/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-kube-api-access-tk92f\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743863 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-config\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743878 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743895 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/fdaed5ac-0cc4-49de-aa15-33cc2993afd3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-5kndl\" (UID: \"fdaed5ac-0cc4-49de-aa15-33cc2993afd3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743940 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgrpk\" (UniqueName: \"kubernetes.io/projected/f726edde-37ea-43cd-8c7c-16b1263647d2-kube-api-access-cgrpk\") pod \"multus-admission-controller-857f4d67dd-r92cx\" (UID: \"f726edde-37ea-43cd-8c7c-16b1263647d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743956 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-serving-cert\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743975 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.743990 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744009 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99515d23-15fa-4bae-be9e-c0a2e2c46c89-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-k572g\" (UID: \"99515d23-15fa-4bae-be9e-c0a2e2c46c89\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744029 4669 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-config\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744046 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09b754f9-20f8-4d4f-ad25-8fed880f53bb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-p2s7k\" (UID: \"09b754f9-20f8-4d4f-ad25-8fed880f53bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744072 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14ff27a3-e946-4b7c-a56c-d7da016d86df-serving-cert\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744090 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bef917d6-d516-41ca-ab40-e5d138f08a69-config\") pod \"kube-apiserver-operator-766d6c64bb-q7jp7\" (UID: \"bef917d6-d516-41ca-ab40-e5d138f08a69\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744106 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744154 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/372626a6-fd28-4cbb-93e5-e6520b30c3ce-etcd-client\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744172 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gbkcv\" (UniqueName: \"kubernetes.io/projected/d43ac716-9d6c-4acd-9d08-94bb006885d4-kube-api-access-gbkcv\") pod \"ingress-canary-t4zfg\" (UID: \"d43ac716-9d6c-4acd-9d08-94bb006885d4\") " pod="openshift-ingress-canary/ingress-canary-t4zfg" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744191 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lsllf\" (UniqueName: \"kubernetes.io/projected/41607592-fbbb-4003-b9eb-b11cbce16627-kube-api-access-lsllf\") pod \"downloads-7954f5f757-tscn9\" (UID: \"41607592-fbbb-4003-b9eb-b11cbce16627\") " pod="openshift-console/downloads-7954f5f757-tscn9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744210 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/15b4e0f9-29d2-4e88-8588-45d668e7f1ad-srv-cert\") 
pod \"olm-operator-6b444d44fb-9jrwx\" (UID: \"15b4e0f9-29d2-4e88-8588-45d668e7f1ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744240 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744256 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bef917d6-d516-41ca-ab40-e5d138f08a69-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-q7jp7\" (UID: \"bef917d6-d516-41ca-ab40-e5d138f08a69\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744285 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2501fbc-7568-4a6a-8200-465507ac4e49-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-brqbq\" (UID: \"d2501fbc-7568-4a6a-8200-465507ac4e49\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744303 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/439c2c2a-7b67-41a3-8544-8d2362d0db1b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744327 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-serving-cert\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744342 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52fb4\" (UniqueName: \"kubernetes.io/projected/441f1126-5609-431e-bcb2-3e4b6da1b19a-kube-api-access-52fb4\") pod \"service-ca-operator-777779d784-f7zxn\" (UID: \"441f1126-5609-431e-bcb2-3e4b6da1b19a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744365 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz5cl\" (UniqueName: \"kubernetes.io/projected/723f1344-9955-47c5-adca-3c5059f7a61f-kube-api-access-jz5cl\") pod \"migrator-59844c95c7-qd9lk\" (UID: \"723f1344-9955-47c5-adca-3c5059f7a61f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744381 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-console-config\") pod 
\"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744396 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744412 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/439c2c2a-7b67-41a3-8544-8d2362d0db1b-audit-policies\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744428 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-oauth-serving-cert\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744446 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4eeead7e-1859-447b-a93d-f68fb80a119e-metrics-tls\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744462 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744480 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/372626a6-fd28-4cbb-93e5-e6520b30c3ce-serving-cert\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744497 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54hzj\" (UniqueName: \"kubernetes.io/projected/15b4e0f9-29d2-4e88-8588-45d668e7f1ad-kube-api-access-54hzj\") pod \"olm-operator-6b444d44fb-9jrwx\" (UID: \"15b4e0f9-29d2-4e88-8588-45d668e7f1ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744521 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6c97z\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744538 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-serving-cert\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744553 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2slg\" (UniqueName: \"kubernetes.io/projected/de92027c-22f4-4399-98e2-dd919dc0436d-kube-api-access-p2slg\") pod \"machine-config-server-bl72p\" (UID: \"de92027c-22f4-4399-98e2-dd919dc0436d\") " pod="openshift-machine-config-operator/machine-config-server-bl72p" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744568 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-config\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744583 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/441f1126-5609-431e-bcb2-3e4b6da1b19a-config\") pod \"service-ca-operator-777779d784-f7zxn\" (UID: \"441f1126-5609-431e-bcb2-3e4b6da1b19a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744601 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-etcd-serving-ca\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744619 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdtrq\" (UniqueName: \"kubernetes.io/projected/439c2c2a-7b67-41a3-8544-8d2362d0db1b-kube-api-access-sdtrq\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744636 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a88168e0-0728-4c47-8d89-5ece2fa293b9-config-volume\") pod \"collect-profiles-29422995-wlslr\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744651 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-config\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744666 4669 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-service-ca\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744683 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4eeead7e-1859-447b-a93d-f68fb80a119e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.744716 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9b71401-32e6-4421-b992-155809accfe1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-fb9l5\" (UID: \"a9b71401-32e6-4421-b992-155809accfe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746044 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-images\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746076 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6c97z\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746094 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09b754f9-20f8-4d4f-ad25-8fed880f53bb-config\") pod \"kube-controller-manager-operator-78b949d7b-p2s7k\" (UID: \"09b754f9-20f8-4d4f-ad25-8fed880f53bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746112 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/09b754f9-20f8-4d4f-ad25-8fed880f53bb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-p2s7k\" (UID: \"09b754f9-20f8-4d4f-ad25-8fed880f53bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746130 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxl7l\" (UniqueName: \"kubernetes.io/projected/17036398-eaeb-4ce2-9420-57cd9213ecbc-kube-api-access-pxl7l\") pod \"dns-operator-744455d44c-rx9fw\" (UID: \"17036398-eaeb-4ce2-9420-57cd9213ecbc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746158 4669 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skdss\" (UniqueName: \"kubernetes.io/projected/4eeead7e-1859-447b-a93d-f68fb80a119e-kube-api-access-skdss\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746174 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwqgp\" (UniqueName: \"kubernetes.io/projected/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-kube-api-access-mwqgp\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746189 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746206 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3339e1ea-db38-49df-a24e-88b4252274d2-auth-proxy-config\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746243 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/372626a6-fd28-4cbb-93e5-e6520b30c3ce-audit-dir\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746261 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f726edde-37ea-43cd-8c7c-16b1263647d2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-r92cx\" (UID: \"f726edde-37ea-43cd-8c7c-16b1263647d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746279 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w9kr\" (UniqueName: \"kubernetes.io/projected/95d741d8-41e2-4b8d-9fcd-b11f972345bf-kube-api-access-5w9kr\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746296 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/441f1126-5609-431e-bcb2-3e4b6da1b19a-serving-cert\") pod \"service-ca-operator-777779d784-f7zxn\" (UID: \"441f1126-5609-431e-bcb2-3e4b6da1b19a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746312 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2501fbc-7568-4a6a-8200-465507ac4e49-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-brqbq\" (UID: \"d2501fbc-7568-4a6a-8200-465507ac4e49\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746327 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/de92027c-22f4-4399-98e2-dd919dc0436d-certs\") pod \"machine-config-server-bl72p\" (UID: \"de92027c-22f4-4399-98e2-dd919dc0436d\") " pod="openshift-machine-config-operator/machine-config-server-bl72p" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746344 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5v6f8\" (UniqueName: \"kubernetes.io/projected/31533b6a-0dfa-4429-b7f9-097b52b009e6-kube-api-access-5v6f8\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.746359 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6b038ccf-005f-4c1b-b9ec-d9db407f528a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.749161 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-config\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.753795 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14ff27a3-e946-4b7c-a56c-d7da016d86df-serving-cert\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.754150 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-t4zfg"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.754208 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.755922 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.758297 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-etcd-serving-ca\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.758817 4669 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9b71401-32e6-4421-b992-155809accfe1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-fb9l5\" (UID: \"a9b71401-32e6-4421-b992-155809accfe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.759666 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.759966 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-images\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.760039 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/439c2c2a-7b67-41a3-8544-8d2362d0db1b-audit-dir\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.761574 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-image-import-ca\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.769589 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.761937 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/372626a6-fd28-4cbb-93e5-e6520b30c3ce-etcd-client\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.762051 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3339e1ea-db38-49df-a24e-88b4252274d2-config\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.762650 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/372626a6-fd28-4cbb-93e5-e6520b30c3ce-serving-cert\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.762666 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.763425 4669 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-client-ca\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.764135 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-client-ca\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.764183 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/372626a6-fd28-4cbb-93e5-e6520b30c3ce-node-pullsecrets\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.764423 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f8b0b5c5-a647-4bae-867e-9745ae5ec534-srv-cert\") pod \"catalog-operator-68c6474976-4f5s9\" (UID: \"f8b0b5c5-a647-4bae-867e-9745ae5ec534\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.764533 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.764637 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-audit\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.764706 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-config\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.764729 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/439c2c2a-7b67-41a3-8544-8d2362d0db1b-audit-policies\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.764798 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/439c2c2a-7b67-41a3-8544-8d2362d0db1b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.765500 4669 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/3339e1ea-db38-49df-a24e-88b4252274d2-auth-proxy-config\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.766344 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/de92027c-22f4-4399-98e2-dd919dc0436d-node-bootstrap-token\") pod \"machine-config-server-bl72p\" (UID: \"de92027c-22f4-4399-98e2-dd919dc0436d\") " pod="openshift-machine-config-operator/machine-config-server-bl72p" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.766492 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/372626a6-fd28-4cbb-93e5-e6520b30c3ce-audit-dir\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.766537 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/3339e1ea-db38-49df-a24e-88b4252274d2-machine-approver-tls\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.767242 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/372626a6-fd28-4cbb-93e5-e6520b30c3ce-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.767524 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9b71401-32e6-4421-b992-155809accfe1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-fb9l5\" (UID: \"a9b71401-32e6-4421-b992-155809accfe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.767614 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-config\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.767920 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/372626a6-fd28-4cbb-93e5-e6520b30c3ce-encryption-config\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.768896 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-serving-cert\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.769422 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.763051 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/439c2c2a-7b67-41a3-8544-8d2362d0db1b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.771847 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/439c2c2a-7b67-41a3-8544-8d2362d0db1b-serving-cert\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.771962 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.772320 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/fdaed5ac-0cc4-49de-aa15-33cc2993afd3-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-5kndl\" (UID: \"fdaed5ac-0cc4-49de-aa15-33cc2993afd3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.773895 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.774449 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-config\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.775428 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/439c2c2a-7b67-41a3-8544-8d2362d0db1b-encryption-config\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.776288 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/d43ac716-9d6c-4acd-9d08-94bb006885d4-cert\") pod \"ingress-canary-t4zfg\" (UID: \"d43ac716-9d6c-4acd-9d08-94bb006885d4\") " pod="openshift-ingress-canary/ingress-canary-t4zfg" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.776292 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/de92027c-22f4-4399-98e2-dd919dc0436d-certs\") pod \"machine-config-server-bl72p\" (UID: \"de92027c-22f4-4399-98e2-dd919dc0436d\") " pod="openshift-machine-config-operator/machine-config-server-bl72p" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.777591 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.778794 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.780993 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-r92cx"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.782010 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6c97z"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.783022 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-95c68"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.784037 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.786289 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.786319 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-tscn9"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.787027 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.790154 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.791862 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/439c2c2a-7b67-41a3-8544-8d2362d0db1b-etcd-client\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.792028 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qbnt7"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.792051 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.792329 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/8fad0a45-7f20-4957-b7b6-c4ebf59d799c-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-vxbnr\" (UID: \"8fad0a45-7f20-4957-b7b6-c4ebf59d799c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.792440 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.794045 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dzj9z"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.795152 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f8b0b5c5-a647-4bae-867e-9745ae5ec534-profile-collector-cert\") pod \"catalog-operator-68c6474976-4f5s9\" (UID: \"f8b0b5c5-a647-4bae-867e-9745ae5ec534\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.795192 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.797560 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mjz6f"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.798613 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.800324 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.802071 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.803356 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.804746 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.804816 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rx9fw"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.806707 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g997m"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.808419 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.810336 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.813326 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n4dsm"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.815362 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-bqznv"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.816074 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.817375 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dsw2s"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.818650 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.819968 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mjz6f"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 
15:22:42.820947 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.825123 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.825857 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-vzc96"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.827494 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-vzc96"] Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.827605 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-vzc96" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.845439 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.846853 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/441f1126-5609-431e-bcb2-3e4b6da1b19a-serving-cert\") pod \"service-ca-operator-777779d784-f7zxn\" (UID: \"441f1126-5609-431e-bcb2-3e4b6da1b19a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.846946 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2501fbc-7568-4a6a-8200-465507ac4e49-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-brqbq\" (UID: \"d2501fbc-7568-4a6a-8200-465507ac4e49\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847022 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5v6f8\" (UniqueName: \"kubernetes.io/projected/31533b6a-0dfa-4429-b7f9-097b52b009e6-kube-api-access-5v6f8\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847105 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6b038ccf-005f-4c1b-b9ec-d9db407f528a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847204 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvhcz\" (UniqueName: \"kubernetes.io/projected/7d419e0a-917c-410c-820b-ddfab808a3fe-kube-api-access-vvhcz\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847296 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-trusted-ca-bundle\") pod 
\"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847384 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-service-ca-bundle\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847458 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsgfz\" (UniqueName: \"kubernetes.io/projected/99515d23-15fa-4bae-be9e-c0a2e2c46c89-kube-api-access-gsgfz\") pod \"openshift-controller-manager-operator-756b6f6bc6-k572g\" (UID: \"99515d23-15fa-4bae-be9e-c0a2e2c46c89\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847535 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a88168e0-0728-4c47-8d89-5ece2fa293b9-secret-volume\") pod \"collect-profiles-29422995-wlslr\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847609 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j8dz\" (UniqueName: \"kubernetes.io/projected/d2501fbc-7568-4a6a-8200-465507ac4e49-kube-api-access-8j8dz\") pod \"kube-storage-version-migrator-operator-b67b599dd-brqbq\" (UID: \"d2501fbc-7568-4a6a-8200-465507ac4e49\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847692 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/15b4e0f9-29d2-4e88-8588-45d668e7f1ad-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9jrwx\" (UID: \"15b4e0f9-29d2-4e88-8588-45d668e7f1ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847764 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbst6\" (UniqueName: \"kubernetes.io/projected/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-kube-api-access-hbst6\") pod \"marketplace-operator-79b997595-6c97z\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847832 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847910 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6p45s\" (UniqueName: 
\"kubernetes.io/projected/a88168e0-0728-4c47-8d89-5ece2fa293b9-kube-api-access-6p45s\") pod \"collect-profiles-29422995-wlslr\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.847982 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e32e3dfe-0229-477b-8e6c-bd40314231ee-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zszbg\" (UID: \"e32e3dfe-0229-477b-8e6c-bd40314231ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848055 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-dir\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848134 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848203 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2f7dm\" (UniqueName: \"kubernetes.io/projected/6b038ccf-005f-4c1b-b9ec-d9db407f528a-kube-api-access-2f7dm\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848319 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrd87\" (UniqueName: \"kubernetes.io/projected/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-kube-api-access-mrd87\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848395 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-policies\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848448 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-dir\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848466 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/31533b6a-0dfa-4429-b7f9-097b52b009e6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848261 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-service-ca-bundle\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848144 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6b038ccf-005f-4c1b-b9ec-d9db407f528a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848605 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-oauth-config\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848841 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4eeead7e-1859-447b-a93d-f68fb80a119e-trusted-ca\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.848919 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99515d23-15fa-4bae-be9e-c0a2e2c46c89-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-k572g\" (UID: \"99515d23-15fa-4bae-be9e-c0a2e2c46c89\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849032 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849126 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcvvc\" (UniqueName: \"kubernetes.io/projected/e32e3dfe-0229-477b-8e6c-bd40314231ee-kube-api-access-fcvvc\") pod \"control-plane-machine-set-operator-78cbb6b69f-zszbg\" (UID: \"e32e3dfe-0229-477b-8e6c-bd40314231ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849207 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/31533b6a-0dfa-4429-b7f9-097b52b009e6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849329 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bef917d6-d516-41ca-ab40-e5d138f08a69-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-q7jp7\" (UID: \"bef917d6-d516-41ca-ab40-e5d138f08a69\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849408 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/31533b6a-0dfa-4429-b7f9-097b52b009e6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849479 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/17036398-eaeb-4ce2-9420-57cd9213ecbc-metrics-tls\") pod \"dns-operator-744455d44c-rx9fw\" (UID: \"17036398-eaeb-4ce2-9420-57cd9213ecbc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849567 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849645 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-trusted-ca\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849717 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-trusted-ca-bundle\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849803 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6b038ccf-005f-4c1b-b9ec-d9db407f528a-proxy-tls\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849884 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: 
\"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849959 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgrpk\" (UniqueName: \"kubernetes.io/projected/f726edde-37ea-43cd-8c7c-16b1263647d2-kube-api-access-cgrpk\") pod \"multus-admission-controller-857f4d67dd-r92cx\" (UID: \"f726edde-37ea-43cd-8c7c-16b1263647d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850026 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-serving-cert\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850098 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850177 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850274 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99515d23-15fa-4bae-be9e-c0a2e2c46c89-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-k572g\" (UID: \"99515d23-15fa-4bae-be9e-c0a2e2c46c89\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850344 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09b754f9-20f8-4d4f-ad25-8fed880f53bb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-p2s7k\" (UID: \"09b754f9-20f8-4d4f-ad25-8fed880f53bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850421 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bef917d6-d516-41ca-ab40-e5d138f08a69-config\") pod \"kube-apiserver-operator-766d6c64bb-q7jp7\" (UID: \"bef917d6-d516-41ca-ab40-e5d138f08a69\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850505 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850581 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lsllf\" (UniqueName: \"kubernetes.io/projected/41607592-fbbb-4003-b9eb-b11cbce16627-kube-api-access-lsllf\") pod \"downloads-7954f5f757-tscn9\" (UID: \"41607592-fbbb-4003-b9eb-b11cbce16627\") " pod="openshift-console/downloads-7954f5f757-tscn9" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850652 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/15b4e0f9-29d2-4e88-8588-45d668e7f1ad-srv-cert\") pod \"olm-operator-6b444d44fb-9jrwx\" (UID: \"15b4e0f9-29d2-4e88-8588-45d668e7f1ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850726 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bef917d6-d516-41ca-ab40-e5d138f08a69-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-q7jp7\" (UID: \"bef917d6-d516-41ca-ab40-e5d138f08a69\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851566 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2501fbc-7568-4a6a-8200-465507ac4e49-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-brqbq\" (UID: \"d2501fbc-7568-4a6a-8200-465507ac4e49\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851615 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-serving-cert\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851637 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52fb4\" (UniqueName: \"kubernetes.io/projected/441f1126-5609-431e-bcb2-3e4b6da1b19a-kube-api-access-52fb4\") pod \"service-ca-operator-777779d784-f7zxn\" (UID: \"441f1126-5609-431e-bcb2-3e4b6da1b19a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851660 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz5cl\" (UniqueName: \"kubernetes.io/projected/723f1344-9955-47c5-adca-3c5059f7a61f-kube-api-access-jz5cl\") pod \"migrator-59844c95c7-qd9lk\" (UID: \"723f1344-9955-47c5-adca-3c5059f7a61f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851679 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-console-config\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851697 4669 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851715 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-oauth-serving-cert\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851743 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bef917d6-d516-41ca-ab40-e5d138f08a69-config\") pod \"kube-apiserver-operator-766d6c64bb-q7jp7\" (UID: \"bef917d6-d516-41ca-ab40-e5d138f08a69\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851765 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4eeead7e-1859-447b-a93d-f68fb80a119e-metrics-tls\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851487 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/99515d23-15fa-4bae-be9e-c0a2e2c46c89-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-k572g\" (UID: \"99515d23-15fa-4bae-be9e-c0a2e2c46c89\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.849912 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/441f1126-5609-431e-bcb2-3e4b6da1b19a-serving-cert\") pod \"service-ca-operator-777779d784-f7zxn\" (UID: \"441f1126-5609-431e-bcb2-3e4b6da1b19a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.850145 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4eeead7e-1859-447b-a93d-f68fb80a119e-trusted-ca\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851926 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/e32e3dfe-0229-477b-8e6c-bd40314231ee-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-zszbg\" (UID: \"e32e3dfe-0229-477b-8e6c-bd40314231ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851940 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.851987 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54hzj\" (UniqueName: \"kubernetes.io/projected/15b4e0f9-29d2-4e88-8588-45d668e7f1ad-kube-api-access-54hzj\") pod \"olm-operator-6b444d44fb-9jrwx\" (UID: \"15b4e0f9-29d2-4e88-8588-45d668e7f1ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852028 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6c97z\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852055 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-serving-cert\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852189 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-config\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852268 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/441f1126-5609-431e-bcb2-3e4b6da1b19a-config\") pod \"service-ca-operator-777779d784-f7zxn\" (UID: \"441f1126-5609-431e-bcb2-3e4b6da1b19a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852404 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a88168e0-0728-4c47-8d89-5ece2fa293b9-config-volume\") pod \"collect-profiles-29422995-wlslr\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852442 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-config\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852467 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-service-ca\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " 
pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852504 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4eeead7e-1859-447b-a93d-f68fb80a119e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852544 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6c97z\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852573 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09b754f9-20f8-4d4f-ad25-8fed880f53bb-config\") pod \"kube-controller-manager-operator-78b949d7b-p2s7k\" (UID: \"09b754f9-20f8-4d4f-ad25-8fed880f53bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852596 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/09b754f9-20f8-4d4f-ad25-8fed880f53bb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-p2s7k\" (UID: \"09b754f9-20f8-4d4f-ad25-8fed880f53bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852622 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxl7l\" (UniqueName: \"kubernetes.io/projected/17036398-eaeb-4ce2-9420-57cd9213ecbc-kube-api-access-pxl7l\") pod \"dns-operator-744455d44c-rx9fw\" (UID: \"17036398-eaeb-4ce2-9420-57cd9213ecbc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852657 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwqgp\" (UniqueName: \"kubernetes.io/projected/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-kube-api-access-mwqgp\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852679 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852702 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skdss\" (UniqueName: \"kubernetes.io/projected/4eeead7e-1859-447b-a93d-f68fb80a119e-kube-api-access-skdss\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc 
kubenswrapper[4669]: I1210 15:22:42.852734 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f726edde-37ea-43cd-8c7c-16b1263647d2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-r92cx\" (UID: \"f726edde-37ea-43cd-8c7c-16b1263647d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852757 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w9kr\" (UniqueName: \"kubernetes.io/projected/95d741d8-41e2-4b8d-9fcd-b11f972345bf-kube-api-access-5w9kr\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852850 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/441f1126-5609-431e-bcb2-3e4b6da1b19a-config\") pod \"service-ca-operator-777779d784-f7zxn\" (UID: \"441f1126-5609-431e-bcb2-3e4b6da1b19a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.852976 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-config\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.853104 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09b754f9-20f8-4d4f-ad25-8fed880f53bb-config\") pod \"kube-controller-manager-operator-78b949d7b-p2s7k\" (UID: \"09b754f9-20f8-4d4f-ad25-8fed880f53bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.853614 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09b754f9-20f8-4d4f-ad25-8fed880f53bb-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-p2s7k\" (UID: \"09b754f9-20f8-4d4f-ad25-8fed880f53bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.854147 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a88168e0-0728-4c47-8d89-5ece2fa293b9-config-volume\") pod \"collect-profiles-29422995-wlslr\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.854379 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.854687 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/15b4e0f9-29d2-4e88-8588-45d668e7f1ad-srv-cert\") pod \"olm-operator-6b444d44fb-9jrwx\" (UID: \"15b4e0f9-29d2-4e88-8588-45d668e7f1ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.856247 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/15b4e0f9-29d2-4e88-8588-45d668e7f1ad-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9jrwx\" (UID: \"15b4e0f9-29d2-4e88-8588-45d668e7f1ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.856999 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/f726edde-37ea-43cd-8c7c-16b1263647d2-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-r92cx\" (UID: \"f726edde-37ea-43cd-8c7c-16b1263647d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.857302 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4eeead7e-1859-447b-a93d-f68fb80a119e-metrics-tls\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.858603 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a88168e0-0728-4c47-8d89-5ece2fa293b9-secret-volume\") pod \"collect-profiles-29422995-wlslr\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.859282 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/99515d23-15fa-4bae-be9e-c0a2e2c46c89-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-k572g\" (UID: \"99515d23-15fa-4bae-be9e-c0a2e2c46c89\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.861194 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bef917d6-d516-41ca-ab40-e5d138f08a69-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-q7jp7\" (UID: \"bef917d6-d516-41ca-ab40-e5d138f08a69\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.864792 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.866759 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-serving-cert\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.885408 4669 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.896079 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d2501fbc-7568-4a6a-8200-465507ac4e49-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-brqbq\" (UID: \"d2501fbc-7568-4a6a-8200-465507ac4e49\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.905420 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.908107 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d2501fbc-7568-4a6a-8200-465507ac4e49-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-brqbq\" (UID: \"d2501fbc-7568-4a6a-8200-465507ac4e49\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.925260 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.945354 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.964792 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.975866 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-serving-cert\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:42 crc kubenswrapper[4669]: I1210 15:22:42.986648 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.005447 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.014332 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-oauth-config\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.025302 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.034704 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-service-ca\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.053362 4669 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"trusted-ca-bundle" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.061716 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-trusted-ca-bundle\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.085950 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.094473 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/31533b6a-0dfa-4429-b7f9-097b52b009e6-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.105032 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.115960 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.125171 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.134169 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.146466 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.156105 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-serving-cert\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.164738 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.186181 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.205706 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.215162 4669 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.226268 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.233368 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.246245 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.273194 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.288414 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.307788 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.312366 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.319434 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.325365 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.325892 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.332122 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.346844 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.353761 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-oauth-serving-cert\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.366396 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.371728 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.386779 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.393929 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-console-config\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.397675 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.407160 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.413115 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-policies\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.426500 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.445793 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.452754 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.474021 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.479564 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.485058 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.504824 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.533205 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.540043 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31533b6a-0dfa-4429-b7f9-097b52b009e6-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.546050 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.565543 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.574197 4669 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-config\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.593061 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.602143 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-trusted-ca\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.605210 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.626069 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.635752 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6c97z\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.643628 4669 request.go:700] Waited for 1.007753582s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/secrets?fieldSelector=metadata.name%3Dmarketplace-operator-dockercfg-5nsgg&limit=500&resourceVersion=0 Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.645487 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.675706 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.685303 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6c97z\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.685642 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.706077 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.726177 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.745995 4669 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.765407 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.785564 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.806169 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.826439 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.845881 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 10 15:22:43 crc kubenswrapper[4669]: E1210 15:22:43.850390 4669 secret.go:188] Couldn't get secret openshift-dns-operator/metrics-tls: failed to sync secret cache: timed out waiting for the condition Dec 10 15:22:43 crc kubenswrapper[4669]: E1210 15:22:43.850453 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/17036398-eaeb-4ce2-9420-57cd9213ecbc-metrics-tls podName:17036398-eaeb-4ce2-9420-57cd9213ecbc nodeName:}" failed. No retries permitted until 2025-12-10 15:22:44.350434531 +0000 UTC m=+138.267381158 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-tls" (UniqueName: "kubernetes.io/secret/17036398-eaeb-4ce2-9420-57cd9213ecbc-metrics-tls") pod "dns-operator-744455d44c-rx9fw" (UID: "17036398-eaeb-4ce2-9420-57cd9213ecbc") : failed to sync secret cache: timed out waiting for the condition Dec 10 15:22:43 crc kubenswrapper[4669]: E1210 15:22:43.850562 4669 secret.go:188] Couldn't get secret openshift-machine-config-operator/mcc-proxy-tls: failed to sync secret cache: timed out waiting for the condition Dec 10 15:22:43 crc kubenswrapper[4669]: E1210 15:22:43.850683 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6b038ccf-005f-4c1b-b9ec-d9db407f528a-proxy-tls podName:6b038ccf-005f-4c1b-b9ec-d9db407f528a nodeName:}" failed. No retries permitted until 2025-12-10 15:22:44.350655066 +0000 UTC m=+138.267601733 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/6b038ccf-005f-4c1b-b9ec-d9db407f528a-proxy-tls") pod "machine-config-controller-84d6567774-5mc9q" (UID: "6b038ccf-005f-4c1b-b9ec-d9db407f528a") : failed to sync secret cache: timed out waiting for the condition Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.865328 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.886097 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.905859 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.929814 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.945581 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.967009 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 10 15:22:43 crc kubenswrapper[4669]: I1210 15:22:43.985328 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.006786 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.026075 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.046745 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.066057 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.085884 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.106194 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.127777 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.145668 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.165307 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.185510 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.205670 4669 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.226390 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.245429 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.265595 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.286033 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.305864 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.339990 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.347751 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.364964 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.378987 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/17036398-eaeb-4ce2-9420-57cd9213ecbc-metrics-tls\") pod \"dns-operator-744455d44c-rx9fw\" (UID: \"17036398-eaeb-4ce2-9420-57cd9213ecbc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.379364 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6b038ccf-005f-4c1b-b9ec-d9db407f528a-proxy-tls\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.390724 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.390862 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/17036398-eaeb-4ce2-9420-57cd9213ecbc-metrics-tls\") pod \"dns-operator-744455d44c-rx9fw\" (UID: \"17036398-eaeb-4ce2-9420-57cd9213ecbc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.391022 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/6b038ccf-005f-4c1b-b9ec-d9db407f528a-proxy-tls\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.405432 4669 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-image-registry"/"installation-pull-secrets" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.426103 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.445963 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.465976 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.485091 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.506479 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.563185 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr9v5\" (UniqueName: \"kubernetes.io/projected/8fad0a45-7f20-4957-b7b6-c4ebf59d799c-kube-api-access-jr9v5\") pod \"package-server-manager-789f6589d5-vxbnr\" (UID: \"8fad0a45-7f20-4957-b7b6-c4ebf59d799c\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.581779 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gbkcv\" (UniqueName: \"kubernetes.io/projected/d43ac716-9d6c-4acd-9d08-94bb006885d4-kube-api-access-gbkcv\") pod \"ingress-canary-t4zfg\" (UID: \"d43ac716-9d6c-4acd-9d08-94bb006885d4\") " pod="openshift-ingress-canary/ingress-canary-t4zfg" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.600584 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khjvw\" (UniqueName: \"kubernetes.io/projected/a9b71401-32e6-4421-b992-155809accfe1-kube-api-access-khjvw\") pod \"openshift-apiserver-operator-796bbdcf4f-fb9l5\" (UID: \"a9b71401-32e6-4421-b992-155809accfe1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.623657 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2slg\" (UniqueName: \"kubernetes.io/projected/de92027c-22f4-4399-98e2-dd919dc0436d-kube-api-access-p2slg\") pod \"machine-config-server-bl72p\" (UID: \"de92027c-22f4-4399-98e2-dd919dc0436d\") " pod="openshift-machine-config-operator/machine-config-server-bl72p" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.639348 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdtrq\" (UniqueName: \"kubernetes.io/projected/439c2c2a-7b67-41a3-8544-8d2362d0db1b-kube-api-access-sdtrq\") pod \"apiserver-7bbb656c7d-t2f75\" (UID: \"439c2c2a-7b67-41a3-8544-8d2362d0db1b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.644111 4669 request.go:700] Waited for 1.882648225s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/serviceaccounts/route-controller-manager-sa/token Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.659493 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpqwc\" (UniqueName: 
\"kubernetes.io/projected/14ff27a3-e946-4b7c-a56c-d7da016d86df-kube-api-access-qpqwc\") pod \"route-controller-manager-6576b87f9c-r2g6m\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.679758 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5csv\" (UniqueName: \"kubernetes.io/projected/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-kube-api-access-m5csv\") pod \"controller-manager-879f6c89f-7j6zx\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.683453 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.691704 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.699829 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv69v\" (UniqueName: \"kubernetes.io/projected/f8b0b5c5-a647-4bae-867e-9745ae5ec534-kube-api-access-zv69v\") pod \"catalog-operator-68c6474976-4f5s9\" (UID: \"f8b0b5c5-a647-4bae-867e-9745ae5ec534\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.717233 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.720290 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sgc4h\" (UniqueName: \"kubernetes.io/projected/fdaed5ac-0cc4-49de-aa15-33cc2993afd3-kube-api-access-sgc4h\") pod \"cluster-samples-operator-665b6dd947-5kndl\" (UID: \"fdaed5ac-0cc4-49de-aa15-33cc2993afd3\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.727852 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.738529 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.742363 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bfw68\" (UniqueName: \"kubernetes.io/projected/372626a6-fd28-4cbb-93e5-e6520b30c3ce-kube-api-access-bfw68\") pod \"apiserver-76f77b778f-qhbj9\" (UID: \"372626a6-fd28-4cbb-93e5-e6520b30c3ce\") " pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.750473 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-bl72p" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.761953 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqwh9\" (UniqueName: \"kubernetes.io/projected/3339e1ea-db38-49df-a24e-88b4252274d2-kube-api-access-lqwh9\") pod \"machine-approver-56656f9798-854cf\" (UID: \"3339e1ea-db38-49df-a24e-88b4252274d2\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.762799 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.785411 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-t4zfg" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.787005 4669 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.803839 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.806005 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.816122 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tk92f\" (UniqueName: \"kubernetes.io/projected/d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4-kube-api-access-tk92f\") pod \"machine-api-operator-5694c8668f-wvzzm\" (UID: \"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.825687 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.828044 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.849204 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 10 15:22:44 crc kubenswrapper[4669]: W1210 15:22:44.854566 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde92027c_22f4_4399_98e2_dd919dc0436d.slice/crio-b8a076cb5aee06688e5d890fe93088815021ee580750e2f230b8a2d7667339c4 WatchSource:0}: Error finding container b8a076cb5aee06688e5d890fe93088815021ee580750e2f230b8a2d7667339c4: Status 404 returned error can't find the container with id b8a076cb5aee06688e5d890fe93088815021ee580750e2f230b8a2d7667339c4 Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.868368 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 10 15:22:44 crc kubenswrapper[4669]: I1210 15:22:44.887611 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:44.947737 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5v6f8\" (UniqueName: \"kubernetes.io/projected/31533b6a-0dfa-4429-b7f9-097b52b009e6-kube-api-access-5v6f8\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:44.948106 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:44.950087 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvhcz\" (UniqueName: \"kubernetes.io/projected/7d419e0a-917c-410c-820b-ddfab808a3fe-kube-api-access-vvhcz\") pod \"console-f9d7485db-dsw2s\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.049643 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr"] Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.050356 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.050893 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.061432 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j8dz\" (UniqueName: \"kubernetes.io/projected/d2501fbc-7568-4a6a-8200-465507ac4e49-kube-api-access-8j8dz\") pod \"kube-storage-version-migrator-operator-b67b599dd-brqbq\" (UID: \"d2501fbc-7568-4a6a-8200-465507ac4e49\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.066038 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbst6\" (UniqueName: \"kubernetes.io/projected/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-kube-api-access-hbst6\") pod \"marketplace-operator-79b997595-6c97z\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.071812 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2f7dm\" (UniqueName: \"kubernetes.io/projected/6b038ccf-005f-4c1b-b9ec-d9db407f528a-kube-api-access-2f7dm\") pod \"machine-config-controller-84d6567774-5mc9q\" (UID: \"6b038ccf-005f-4c1b-b9ec-d9db407f528a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.074032 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsgfz\" (UniqueName: \"kubernetes.io/projected/99515d23-15fa-4bae-be9e-c0a2e2c46c89-kube-api-access-gsgfz\") pod \"openshift-controller-manager-operator-756b6f6bc6-k572g\" (UID: \"99515d23-15fa-4bae-be9e-c0a2e2c46c89\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.081848 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrd87\" (UniqueName: \"kubernetes.io/projected/5ab4b3f4-4d2a-45d2-99ad-330e3624f0be-kube-api-access-mrd87\") pod \"authentication-operator-69f744f599-95c68\" (UID: \"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.085884 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6p45s\" (UniqueName: \"kubernetes.io/projected/a88168e0-0728-4c47-8d89-5ece2fa293b9-kube-api-access-6p45s\") pod \"collect-profiles-29422995-wlslr\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.087286 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.096347 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7j6zx"] Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.101867 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcvvc\" (UniqueName: \"kubernetes.io/projected/e32e3dfe-0229-477b-8e6c-bd40314231ee-kube-api-access-fcvvc\") pod \"control-plane-machine-set-operator-78cbb6b69f-zszbg\" (UID: \"e32e3dfe-0229-477b-8e6c-bd40314231ee\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.121054 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/31533b6a-0dfa-4429-b7f9-097b52b009e6-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8q5g5\" (UID: \"31533b6a-0dfa-4429-b7f9-097b52b009e6\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:45 crc kubenswrapper[4669]: W1210 15:22:45.131291 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8fad0a45_7f20_4957_b7b6_c4ebf59d799c.slice/crio-9af5b4b4d244c8b2b00c734e2ca2156fb8141cd3274039f63f8e079a9dd25dbf WatchSource:0}: Error finding container 9af5b4b4d244c8b2b00c734e2ca2156fb8141cd3274039f63f8e079a9dd25dbf: Status 404 returned error can't find the container with id 9af5b4b4d244c8b2b00c734e2ca2156fb8141cd3274039f63f8e079a9dd25dbf Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.137123 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bef917d6-d516-41ca-ab40-e5d138f08a69-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-q7jp7\" (UID: \"bef917d6-d516-41ca-ab40-e5d138f08a69\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.139769 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgrpk\" (UniqueName: \"kubernetes.io/projected/f726edde-37ea-43cd-8c7c-16b1263647d2-kube-api-access-cgrpk\") pod \"multus-admission-controller-857f4d67dd-r92cx\" (UID: \"f726edde-37ea-43cd-8c7c-16b1263647d2\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.143151 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.158237 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lsllf\" (UniqueName: \"kubernetes.io/projected/41607592-fbbb-4003-b9eb-b11cbce16627-kube-api-access-lsllf\") pod \"downloads-7954f5f757-tscn9\" (UID: \"41607592-fbbb-4003-b9eb-b11cbce16627\") " pod="openshift-console/downloads-7954f5f757-tscn9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.170904 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75"] Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.183469 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52fb4\" (UniqueName: \"kubernetes.io/projected/441f1126-5609-431e-bcb2-3e4b6da1b19a-kube-api-access-52fb4\") pod \"service-ca-operator-777779d784-f7zxn\" (UID: \"441f1126-5609-431e-bcb2-3e4b6da1b19a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.219095 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54hzj\" (UniqueName: \"kubernetes.io/projected/15b4e0f9-29d2-4e88-8588-45d668e7f1ad-kube-api-access-54hzj\") pod \"olm-operator-6b444d44fb-9jrwx\" (UID: \"15b4e0f9-29d2-4e88-8588-45d668e7f1ad\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.219665 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.220692 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.226111 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz5cl\" (UniqueName: \"kubernetes.io/projected/723f1344-9955-47c5-adca-3c5059f7a61f-kube-api-access-jz5cl\") pod \"migrator-59844c95c7-qd9lk\" (UID: \"723f1344-9955-47c5-adca-3c5059f7a61f\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.229718 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.235706 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.254111 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4eeead7e-1859-447b-a93d-f68fb80a119e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.262423 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.263823 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w9kr\" (UniqueName: \"kubernetes.io/projected/95d741d8-41e2-4b8d-9fcd-b11f972345bf-kube-api-access-5w9kr\") pod \"oauth-openshift-558db77b4-qbnt7\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.272441 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9"] Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.274265 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-tscn9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.274729 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.276358 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" event={"ID":"3339e1ea-db38-49df-a24e-88b4252274d2","Type":"ContainerStarted","Data":"2cd45d532ca83ff1a9863a7b9498527fd08f7fe734c22f33f2eb6fe78f1b019a"} Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.277405 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" event={"ID":"439c2c2a-7b67-41a3-8544-8d2362d0db1b","Type":"ContainerStarted","Data":"8ac66c7e4ff50b6a236ace2714f674002ac56d8ce0fb463b7ed63dcc29f93021"} Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.278272 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" event={"ID":"8fad0a45-7f20-4957-b7b6-c4ebf59d799c","Type":"ContainerStarted","Data":"9af5b4b4d244c8b2b00c734e2ca2156fb8141cd3274039f63f8e079a9dd25dbf"} Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.285022 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwqgp\" (UniqueName: \"kubernetes.io/projected/11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f-kube-api-access-mwqgp\") pod \"console-operator-58897d9998-dzj9z\" (UID: \"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f\") " pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.281434 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.286417 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" event={"ID":"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b","Type":"ContainerStarted","Data":"3abcb0d7e34f39b8b8ebcde1a82e8c38de75d23c35ef2de5130e6302519b01c1"} Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.292740 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.300669 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.301789 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/09b754f9-20f8-4d4f-ad25-8fed880f53bb-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-p2s7k\" (UID: \"09b754f9-20f8-4d4f-ad25-8fed880f53bb\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.307124 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-bl72p" event={"ID":"de92027c-22f4-4399-98e2-dd919dc0436d","Type":"ContainerStarted","Data":"b8a076cb5aee06688e5d890fe93088815021ee580750e2f230b8a2d7667339c4"} Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.316630 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.320862 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skdss\" (UniqueName: \"kubernetes.io/projected/4eeead7e-1859-447b-a93d-f68fb80a119e-kube-api-access-skdss\") pod \"ingress-operator-5b745b69d9-hdzbf\" (UID: \"4eeead7e-1859-447b-a93d-f68fb80a119e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.328351 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.331172 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.339076 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.344810 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5"] Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.350229 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxl7l\" (UniqueName: \"kubernetes.io/projected/17036398-eaeb-4ce2-9420-57cd9213ecbc-kube-api-access-pxl7l\") pod \"dns-operator-744455d44c-rx9fw\" (UID: \"17036398-eaeb-4ce2-9420-57cd9213ecbc\") " pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.350451 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.366605 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.386106 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.394627 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.592491 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.592730 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.594071 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-tls\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.597628 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-certificates\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.597771 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.597918 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm4qr\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-kube-api-access-cm4qr\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.598176 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-trusted-ca\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.598284 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-bound-sa-token\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.598500 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20977ed7-6db6-43e9-95a6-95280e2d8814-ca-trust-extracted\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.598574 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20977ed7-6db6-43e9-95a6-95280e2d8814-installation-pull-secrets\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: E1210 15:22:45.600344 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.100328568 +0000 UTC m=+140.017275295 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:45 crc kubenswrapper[4669]: W1210 15:22:45.612393 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9b71401_32e6_4421_b992_155809accfe1.slice/crio-26b522106b251a5ba0516baba235543e619ea0a50826d7fd2d70d1bb52d0a071 WatchSource:0}: Error finding container 26b522106b251a5ba0516baba235543e619ea0a50826d7fd2d70d1bb52d0a071: Status 404 returned error can't find the container with id 26b522106b251a5ba0516baba235543e619ea0a50826d7fd2d70d1bb52d0a071 Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.617430 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"] Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.648447 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-t4zfg"] Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.648947 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl"] Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.708150 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:45 crc kubenswrapper[4669]: E1210 15:22:45.709525 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.209510344 +0000 UTC m=+140.126456971 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712296 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-images\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712338 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4wtx\" (UniqueName: \"kubernetes.io/projected/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-kube-api-access-p4wtx\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712375 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4174704b-c801-44b5-9c3b-93dd6d8a3f52-signing-cabundle\") pod \"service-ca-9c57cc56f-g997m\" (UID: \"4174704b-c801-44b5-9c3b-93dd6d8a3f52\") " pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712415 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712442 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4174704b-c801-44b5-9c3b-93dd6d8a3f52-signing-key\") pod \"service-ca-9c57cc56f-g997m\" (UID: \"4174704b-c801-44b5-9c3b-93dd6d8a3f52\") " pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712463 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm4qr\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-kube-api-access-cm4qr\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712482 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/724e9f61-b03e-46ab-86d1-b41eaa447b25-serving-cert\") pod \"openshift-config-operator-7777fb866f-vbwnj\" (UID: \"724e9f61-b03e-46ab-86d1-b41eaa447b25\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 
15:22:45.712503 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712519 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-webhook-cert\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712560 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9aed1358-8ab8-4b48-920c-d4e3377dcf40-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q2hb9\" (UID: \"9aed1358-8ab8-4b48-920c-d4e3377dcf40\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712582 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/dd110da0-eba9-484f-9786-ccb6b7bcf88f-stats-auth\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712599 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-trusted-ca\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712615 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dd110da0-eba9-484f-9786-ccb6b7bcf88f-metrics-certs\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712633 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-bound-sa-token\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712655 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-tmpfs\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712679 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-bdsqg\" (UniqueName: \"kubernetes.io/projected/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-kube-api-access-bdsqg\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712701 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gtpk\" (UniqueName: \"kubernetes.io/projected/dd110da0-eba9-484f-9786-ccb6b7bcf88f-kube-api-access-4gtpk\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712724 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dfg4f\" (UniqueName: \"kubernetes.io/projected/e94faf4e-1a2e-4322-ac1b-d922b513b73d-kube-api-access-dfg4f\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712767 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20977ed7-6db6-43e9-95a6-95280e2d8814-ca-trust-extracted\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712793 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e94faf4e-1a2e-4322-ac1b-d922b513b73d-etcd-ca\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712820 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20977ed7-6db6-43e9-95a6-95280e2d8814-installation-pull-secrets\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712839 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/724e9f61-b03e-46ab-86d1-b41eaa447b25-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vbwnj\" (UID: \"724e9f61-b03e-46ab-86d1-b41eaa447b25\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712861 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xzv6\" (UniqueName: \"kubernetes.io/projected/4174704b-c801-44b5-9c3b-93dd6d8a3f52-kube-api-access-8xzv6\") pod \"service-ca-9c57cc56f-g997m\" (UID: \"4174704b-c801-44b5-9c3b-93dd6d8a3f52\") " pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712888 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/e94faf4e-1a2e-4322-ac1b-d922b513b73d-etcd-client\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.712999 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-tls\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.713037 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd110da0-eba9-484f-9786-ccb6b7bcf88f-service-ca-bundle\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.713058 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9aed1358-8ab8-4b48-920c-d4e3377dcf40-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q2hb9\" (UID: \"9aed1358-8ab8-4b48-920c-d4e3377dcf40\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.713075 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-proxy-tls\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.713092 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/dd110da0-eba9-484f-9786-ccb6b7bcf88f-default-certificate\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.713110 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e94faf4e-1a2e-4322-ac1b-d922b513b73d-serving-cert\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.713128 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e94faf4e-1a2e-4322-ac1b-d922b513b73d-config\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.713156 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9aed1358-8ab8-4b48-920c-d4e3377dcf40-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q2hb9\" (UID: 
\"9aed1358-8ab8-4b48-920c-d4e3377dcf40\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.713187 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hflkd\" (UniqueName: \"kubernetes.io/projected/724e9f61-b03e-46ab-86d1-b41eaa447b25-kube-api-access-hflkd\") pod \"openshift-config-operator-7777fb866f-vbwnj\" (UID: \"724e9f61-b03e-46ab-86d1-b41eaa447b25\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" Dec 10 15:22:45 crc kubenswrapper[4669]: E1210 15:22:45.716329 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.216310206 +0000 UTC m=+140.133256823 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.718920 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-trusted-ca\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.713207 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-certificates\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.720405 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-apiservice-cert\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.720433 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e94faf4e-1a2e-4322-ac1b-d922b513b73d-etcd-service-ca\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.720447 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20977ed7-6db6-43e9-95a6-95280e2d8814-ca-trust-extracted\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 
15:22:45.725465 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-certificates\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.747993 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20977ed7-6db6-43e9-95a6-95280e2d8814-installation-pull-secrets\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.754591 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-tls\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.757110 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm4qr\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-kube-api-access-cm4qr\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821300 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821381 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e94faf4e-1a2e-4322-ac1b-d922b513b73d-etcd-ca\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821408 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-plugins-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821430 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/724e9f61-b03e-46ab-86d1-b41eaa447b25-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vbwnj\" (UID: \"724e9f61-b03e-46ab-86d1-b41eaa447b25\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821445 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npzpq\" (UniqueName: \"kubernetes.io/projected/fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a-kube-api-access-npzpq\") pod 
\"dns-default-vzc96\" (UID: \"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a\") " pod="openshift-dns/dns-default-vzc96" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821497 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xzv6\" (UniqueName: \"kubernetes.io/projected/4174704b-c801-44b5-9c3b-93dd6d8a3f52-kube-api-access-8xzv6\") pod \"service-ca-9c57cc56f-g997m\" (UID: \"4174704b-c801-44b5-9c3b-93dd6d8a3f52\") " pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821520 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e94faf4e-1a2e-4322-ac1b-d922b513b73d-etcd-client\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821535 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-registration-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821630 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd110da0-eba9-484f-9786-ccb6b7bcf88f-service-ca-bundle\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821646 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9aed1358-8ab8-4b48-920c-d4e3377dcf40-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q2hb9\" (UID: \"9aed1358-8ab8-4b48-920c-d4e3377dcf40\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821661 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-proxy-tls\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821675 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/dd110da0-eba9-484f-9786-ccb6b7bcf88f-default-certificate\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821691 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e94faf4e-1a2e-4322-ac1b-d922b513b73d-serving-cert\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821707 4669 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e94faf4e-1a2e-4322-ac1b-d922b513b73d-config\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821723 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9aed1358-8ab8-4b48-920c-d4e3377dcf40-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q2hb9\" (UID: \"9aed1358-8ab8-4b48-920c-d4e3377dcf40\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821740 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-socket-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821765 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hflkd\" (UniqueName: \"kubernetes.io/projected/724e9f61-b03e-46ab-86d1-b41eaa447b25-kube-api-access-hflkd\") pod \"openshift-config-operator-7777fb866f-vbwnj\" (UID: \"724e9f61-b03e-46ab-86d1-b41eaa447b25\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821783 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-apiservice-cert\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821808 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e94faf4e-1a2e-4322-ac1b-d922b513b73d-etcd-service-ca\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821824 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a-config-volume\") pod \"dns-default-vzc96\" (UID: \"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a\") " pod="openshift-dns/dns-default-vzc96" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.821858 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-images\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822067 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4wtx\" (UniqueName: \"kubernetes.io/projected/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-kube-api-access-p4wtx\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " 
pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822086 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4174704b-c801-44b5-9c3b-93dd6d8a3f52-signing-cabundle\") pod \"service-ca-9c57cc56f-g997m\" (UID: \"4174704b-c801-44b5-9c3b-93dd6d8a3f52\") " pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822104 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-csi-data-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822151 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4174704b-c801-44b5-9c3b-93dd6d8a3f52-signing-key\") pod \"service-ca-9c57cc56f-g997m\" (UID: \"4174704b-c801-44b5-9c3b-93dd6d8a3f52\") " pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822169 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/724e9f61-b03e-46ab-86d1-b41eaa447b25-serving-cert\") pod \"openshift-config-operator-7777fb866f-vbwnj\" (UID: \"724e9f61-b03e-46ab-86d1-b41eaa447b25\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822184 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822206 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-webhook-cert\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822247 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-mountpoint-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822277 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9aed1358-8ab8-4b48-920c-d4e3377dcf40-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q2hb9\" (UID: \"9aed1358-8ab8-4b48-920c-d4e3377dcf40\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822325 4669 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/dd110da0-eba9-484f-9786-ccb6b7bcf88f-stats-auth\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822343 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8797w\" (UniqueName: \"kubernetes.io/projected/68382e07-892e-4adf-a43b-de6b61754e76-kube-api-access-8797w\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822381 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dd110da0-eba9-484f-9786-ccb6b7bcf88f-metrics-certs\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822404 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a-metrics-tls\") pod \"dns-default-vzc96\" (UID: \"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a\") " pod="openshift-dns/dns-default-vzc96" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822419 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-tmpfs\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822435 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gtpk\" (UniqueName: \"kubernetes.io/projected/dd110da0-eba9-484f-9786-ccb6b7bcf88f-kube-api-access-4gtpk\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822451 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dfg4f\" (UniqueName: \"kubernetes.io/projected/e94faf4e-1a2e-4322-ac1b-d922b513b73d-kube-api-access-dfg4f\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.822480 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdsqg\" (UniqueName: \"kubernetes.io/projected/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-kube-api-access-bdsqg\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: E1210 15:22:45.824543 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.324519238 +0000 UTC m=+140.241465855 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.825101 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e94faf4e-1a2e-4322-ac1b-d922b513b73d-etcd-ca\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.826550 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd110da0-eba9-484f-9786-ccb6b7bcf88f-service-ca-bundle\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.826811 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/724e9f61-b03e-46ab-86d1-b41eaa447b25-available-featuregates\") pod \"openshift-config-operator-7777fb866f-vbwnj\" (UID: \"724e9f61-b03e-46ab-86d1-b41eaa447b25\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.830734 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e94faf4e-1a2e-4322-ac1b-d922b513b73d-etcd-service-ca\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.837643 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-images\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.838956 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9aed1358-8ab8-4b48-920c-d4e3377dcf40-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q2hb9\" (UID: \"9aed1358-8ab8-4b48-920c-d4e3377dcf40\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.839051 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/4174704b-c801-44b5-9c3b-93dd6d8a3f52-signing-cabundle\") pod \"service-ca-9c57cc56f-g997m\" (UID: \"4174704b-c801-44b5-9c3b-93dd6d8a3f52\") " pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.839582 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e94faf4e-1a2e-4322-ac1b-d922b513b73d-config\") pod 
\"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.839678 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-auth-proxy-config\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.845399 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-tmpfs\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.849757 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-webhook-cert\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.854701 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/dd110da0-eba9-484f-9786-ccb6b7bcf88f-metrics-certs\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.860678 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-bound-sa-token\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.867172 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e94faf4e-1a2e-4322-ac1b-d922b513b73d-etcd-client\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927060 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-plugins-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927399 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npzpq\" (UniqueName: \"kubernetes.io/projected/fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a-kube-api-access-npzpq\") pod \"dns-default-vzc96\" (UID: \"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a\") " pod="openshift-dns/dns-default-vzc96" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927428 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: 
\"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-registration-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927507 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-socket-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927545 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a-config-volume\") pod \"dns-default-vzc96\" (UID: \"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a\") " pod="openshift-dns/dns-default-vzc96" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927567 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-csi-data-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927589 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927636 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-mountpoint-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927677 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8797w\" (UniqueName: \"kubernetes.io/projected/68382e07-892e-4adf-a43b-de6b61754e76-kube-api-access-8797w\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.927698 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a-metrics-tls\") pod \"dns-default-vzc96\" (UID: \"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a\") " pod="openshift-dns/dns-default-vzc96" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.928981 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a-config-volume\") pod \"dns-default-vzc96\" (UID: \"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a\") " pod="openshift-dns/dns-default-vzc96" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.929438 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: 
\"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-plugins-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.929575 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-csi-data-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.929657 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-mountpoint-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.929709 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-socket-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.930409 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xzv6\" (UniqueName: \"kubernetes.io/projected/4174704b-c801-44b5-9c3b-93dd6d8a3f52-kube-api-access-8xzv6\") pod \"service-ca-9c57cc56f-g997m\" (UID: \"4174704b-c801-44b5-9c3b-93dd6d8a3f52\") " pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.930456 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9aed1358-8ab8-4b48-920c-d4e3377dcf40-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q2hb9\" (UID: \"9aed1358-8ab8-4b48-920c-d4e3377dcf40\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.933776 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/68382e07-892e-4adf-a43b-de6b61754e76-registration-dir\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" Dec 10 15:22:45 crc kubenswrapper[4669]: E1210 15:22:45.933916 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.433892419 +0000 UTC m=+140.350839046 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.942029 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/4174704b-c801-44b5-9c3b-93dd6d8a3f52-signing-key\") pod \"service-ca-9c57cc56f-g997m\" (UID: \"4174704b-c801-44b5-9c3b-93dd6d8a3f52\") " pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.949210 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e94faf4e-1a2e-4322-ac1b-d922b513b73d-serving-cert\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.952361 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4wtx\" (UniqueName: \"kubernetes.io/projected/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-kube-api-access-p4wtx\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.952551 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/dd110da0-eba9-484f-9786-ccb6b7bcf88f-stats-auth\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:45 crc kubenswrapper[4669]: I1210 15:22:45.969555 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-apiservice-cert\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.017701 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-g997m" Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.028734 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.029340 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.529324176 +0000 UTC m=+140.446270803 (durationBeforeRetry 500ms). 
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.132630 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.133349 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.63333689 +0000 UTC m=+140.550283517 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.135718 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/dd110da0-eba9-484f-9786-ccb6b7bcf88f-default-certificate\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.136264 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/7599428a-06ae-462c-bb3b-bb6d9ceae3b0-proxy-tls\") pod \"machine-config-operator-74547568cd-2rh6l\" (UID: \"7599428a-06ae-462c-bb3b-bb6d9ceae3b0\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.150417 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdsqg\" (UniqueName: \"kubernetes.io/projected/6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c-kube-api-access-bdsqg\") pod \"packageserver-d55dfcdfc-jt6sh\" (UID: \"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.157847 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gtpk\" (UniqueName: \"kubernetes.io/projected/dd110da0-eba9-484f-9786-ccb6b7bcf88f-kube-api-access-4gtpk\") pod \"router-default-5444994796-b6x7l\" (UID: \"dd110da0-eba9-484f-9786-ccb6b7bcf88f\") " pod="openshift-ingress/router-default-5444994796-b6x7l"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.165075 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a-metrics-tls\") pod \"dns-default-vzc96\" (UID: \"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a\") " pod="openshift-dns/dns-default-vzc96"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.170747 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9aed1358-8ab8-4b48-920c-d4e3377dcf40-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-q2hb9\" (UID: \"9aed1358-8ab8-4b48-920c-d4e3377dcf40\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.170828 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dfg4f\" (UniqueName: \"kubernetes.io/projected/e94faf4e-1a2e-4322-ac1b-d922b513b73d-kube-api-access-dfg4f\") pod \"etcd-operator-b45778765-bqznv\" (UID: \"e94faf4e-1a2e-4322-ac1b-d922b513b73d\") " pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.180034 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npzpq\" (UniqueName: \"kubernetes.io/projected/fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a-kube-api-access-npzpq\") pod \"dns-default-vzc96\" (UID: \"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a\") " pod="openshift-dns/dns-default-vzc96"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.180670 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8797w\" (UniqueName: \"kubernetes.io/projected/68382e07-892e-4adf-a43b-de6b61754e76-kube-api-access-8797w\") pod \"csi-hostpathplugin-mjz6f\" (UID: \"68382e07-892e-4adf-a43b-de6b61754e76\") " pod="hostpath-provisioner/csi-hostpathplugin-mjz6f"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.182961 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hflkd\" (UniqueName: \"kubernetes.io/projected/724e9f61-b03e-46ab-86d1-b41eaa447b25-kube-api-access-hflkd\") pod \"openshift-config-operator-7777fb866f-vbwnj\" (UID: \"724e9f61-b03e-46ab-86d1-b41eaa447b25\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.185150 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/724e9f61-b03e-46ab-86d1-b41eaa447b25-serving-cert\") pod \"openshift-config-operator-7777fb866f-vbwnj\" (UID: \"724e9f61-b03e-46ab-86d1-b41eaa447b25\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.236387 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.236665 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.736646905 +0000 UTC m=+140.653593532 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.248897 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.259279 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.288254 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.292064 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-b6x7l"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.293719 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.337475 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.338170 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.838156588 +0000 UTC m=+140.755103205 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.342595 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-mjz6f"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.343117 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.367798 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" event={"ID":"14ff27a3-e946-4b7c-a56c-d7da016d86df","Type":"ContainerStarted","Data":"d5c76533392942c2943852799d8fef766153fe4405898523d0800afaea3b10eb"}
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.443073 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.443200 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.943180595 +0000 UTC m=+140.860127212 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.443509 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.443940 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:46.943929252 +0000 UTC m=+140.860875879 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.452194 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-vzc96"
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.479942 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" event={"ID":"f8b0b5c5-a647-4bae-867e-9745ae5ec534","Type":"ContainerStarted","Data":"740119cd1f3a660918304b2403946e168d791d8c5fdac5981e7a7555fe9aee55"}
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.544277 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.544723 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.044706027 +0000 UTC m=+140.961652654 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.599173 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" event={"ID":"3339e1ea-db38-49df-a24e-88b4252274d2","Type":"ContainerStarted","Data":"00f751d3d5f49194f2ac6ff7fd245d41addc4871be5362fbc67c1dc4fc15f976"}
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.622726 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" event={"ID":"8fad0a45-7f20-4957-b7b6-c4ebf59d799c","Type":"ContainerStarted","Data":"561df3bc222207577bc0a9f608494aa32bb8c3df045be142e9cfac6ae1c76254"}
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.644977 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" event={"ID":"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b","Type":"ContainerStarted","Data":"616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837"}
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.645270 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.645703 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.145686039 +0000 UTC m=+141.062632666 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.648148 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" event={"ID":"a9b71401-32e6-4421-b992-155809accfe1","Type":"ContainerStarted","Data":"26b522106b251a5ba0516baba235543e619ea0a50826d7fd2d70d1bb52d0a071"}
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.649975 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-t4zfg" event={"ID":"d43ac716-9d6c-4acd-9d08-94bb006885d4","Type":"ContainerStarted","Data":"574b571b94b681ca77671ede135f5cb61c864b152e9137fe665052ea71824b43"}
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.650206 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qhbj9"]
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.651654 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-bl72p" event={"ID":"de92027c-22f4-4399-98e2-dd919dc0436d","Type":"ContainerStarted","Data":"91bee954326cd4d27ac2f232fdbcba6e51a59856f873c93a7a8d198d6b1fb107"}
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.653006 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-95c68"]
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.720259 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dsw2s"]
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.758479 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.759430 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.259413563 +0000 UTC m=+141.176360190 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.759568 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.761627 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.261619155 +0000 UTC m=+141.178565782 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.861076 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.861505 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.361490849 +0000 UTC m=+141.278437476 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.864114 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q"]
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.918729 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-wvzzm"]
Dec 10 15:22:46 crc kubenswrapper[4669]: I1210 15:22:46.962100 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:46 crc kubenswrapper[4669]: E1210 15:22:46.962575 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.462561932 +0000 UTC m=+141.379508559 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.063739 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:47 crc kubenswrapper[4669]: E1210 15:22:47.064586 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.564565986 +0000 UTC m=+141.481512613 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:47 crc kubenswrapper[4669]: W1210 15:22:47.130244 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3e994d0_cbc4_4a0f_ab58_ed8f5ba521b4.slice/crio-6f2daaa4e779c716924c650e458c9e6851a91ef36783fd80d99297d9c2fd1374 WatchSource:0}: Error finding container 6f2daaa4e779c716924c650e458c9e6851a91ef36783fd80d99297d9c2fd1374: Status 404 returned error can't find the container with id 6f2daaa4e779c716924c650e458c9e6851a91ef36783fd80d99297d9c2fd1374
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.171035 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:47 crc kubenswrapper[4669]: E1210 15:22:47.171425 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.671408406 +0000 UTC m=+141.588355033 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.224122 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-bl72p" podStartSLOduration=5.224047813 podStartE2EDuration="5.224047813s" podCreationTimestamp="2025-12-10 15:22:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:47.220752404 +0000 UTC m=+141.137699041" watchObservedRunningTime="2025-12-10 15:22:47.224047813 +0000 UTC m=+141.140994440"
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.272056 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:47 crc kubenswrapper[4669]: E1210 15:22:47.272273 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.772247803 +0000 UTC m=+141.689194430 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.272669 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:47 crc kubenswrapper[4669]: E1210 15:22:47.273037 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.773025922 +0000 UTC m=+141.689972549 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.375408 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:47 crc kubenswrapper[4669]: E1210 15:22:47.376106 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.876088842 +0000 UTC m=+141.793035469 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.483457 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:47 crc kubenswrapper[4669]: E1210 15:22:47.483807 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:47.983795152 +0000 UTC m=+141.900741779 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.804328 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:47 crc kubenswrapper[4669]: E1210 15:22:47.854333 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:48.354285145 +0000 UTC m=+142.271231772 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.894573 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-b6x7l" event={"ID":"dd110da0-eba9-484f-9786-ccb6b7bcf88f","Type":"ContainerStarted","Data":"9bc0a18db952b9dd7fb147dec8a50748d29cfc23befcd8dcd6607b54af34ab63"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.898857 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" event={"ID":"6b038ccf-005f-4c1b-b9ec-d9db407f528a","Type":"ContainerStarted","Data":"609bfc83aa084e719164fe8998b793f24d33e11950b62adca5d72762172848db"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.901309 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" event={"ID":"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be","Type":"ContainerStarted","Data":"cc0afe463efb14964db07b3b1321c92e607e8cfe1dca2faf2ad0fbf567448029"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.902843 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" event={"ID":"14ff27a3-e946-4b7c-a56c-d7da016d86df","Type":"ContainerStarted","Data":"f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.906285 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:47 crc kubenswrapper[4669]: E1210 15:22:47.906629 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:48.406614994 +0000 UTC m=+142.323561621 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.911715 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.911755 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dsw2s" event={"ID":"7d419e0a-917c-410c-820b-ddfab808a3fe","Type":"ContainerStarted","Data":"e58d099669588d23862cedf987de159f726897f5c67d5cb082aaa2fad933b1b9"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.914723 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" event={"ID":"f8b0b5c5-a647-4bae-867e-9745ae5ec534","Type":"ContainerStarted","Data":"3ad09931e9d3c835b7a03017f17879fea569741b089ac842f6f73246c13274f3"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.916393 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9"
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.921869 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" event={"ID":"8fad0a45-7f20-4957-b7b6-c4ebf59d799c","Type":"ContainerStarted","Data":"17663f3c8b55ce863c3569a85800aec96bad4a1a43c9a555b5a5134c02bdb20f"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.922593 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr"
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.934439 4669 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-r2g6m container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body=
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.934752 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" podUID="14ff27a3-e946-4b7c-a56c-d7da016d86df" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused"
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.934868 4669 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-4f5s9 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body=
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.934954 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" podUID="f8b0b5c5-a647-4bae-867e-9745ae5ec534" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused"
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.943999 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" event={"ID":"a9b71401-32e6-4421-b992-155809accfe1","Type":"ContainerStarted","Data":"560cad7bd6fad1cfb1e8f50e049f6aa1456eec0f7a641eb0bb3e2f60131512a6"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.962592 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-t4zfg" event={"ID":"d43ac716-9d6c-4acd-9d08-94bb006885d4","Type":"ContainerStarted","Data":"229c6302ac7243ae127ab03380c5d95c2494879b9ea916e96d74f766b9a0324a"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.968162 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" event={"ID":"372626a6-fd28-4cbb-93e5-e6520b30c3ce","Type":"ContainerStarted","Data":"227ce419a5fb858c40a53891ad5250d02ed6a7ea243a5eed9aa5533133d6fda7"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.970717 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" event={"ID":"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4","Type":"ContainerStarted","Data":"6f2daaa4e779c716924c650e458c9e6851a91ef36783fd80d99297d9c2fd1374"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.973051 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" event={"ID":"fdaed5ac-0cc4-49de-aa15-33cc2993afd3","Type":"ContainerStarted","Data":"90cd69ad91ed8e6b5494a7530461871c1d47298fd6fa16aa117807f8f5e9d74b"}
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.975141 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx"
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.987611 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx"
Dec 10 15:22:47 crc kubenswrapper[4669]: I1210 15:22:47.997184 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" podStartSLOduration=120.997158265 podStartE2EDuration="2m0.997158265s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:47.987947755 +0000 UTC m=+141.904894402" watchObservedRunningTime="2025-12-10 15:22:47.997158265 +0000 UTC m=+141.914104892"
Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.008428 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.010022 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:48.510006622 +0000 UTC m=+142.426953249 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.012103 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" podStartSLOduration=120.012077151 podStartE2EDuration="2m0.012077151s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:48.007189785 +0000 UTC m=+141.924136412" watchObservedRunningTime="2025-12-10 15:22:48.012077151 +0000 UTC m=+141.929023778"
Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.126233 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.126944 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:48.626925612 +0000 UTC m=+142.543872239 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.184182 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" podStartSLOduration=120.184165518 podStartE2EDuration="2m0.184165518s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:48.129011082 +0000 UTC m=+142.045957739" watchObservedRunningTime="2025-12-10 15:22:48.184165518 +0000 UTC m=+142.101112145" Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.190812 4669 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod439c2c2a_7b67_41a3_8544_8d2362d0db1b.slice/crio-52bc940a55cc07a005345ce8c140b11f8c02a30fab4d971a6df4d7782dfe6c49.scope\": RecentStats: unable to find data in memory cache]" Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.234768 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.235161 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:48.735139486 +0000 UTC m=+142.652086123 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.343601 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.343995 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:48.843980224 +0000 UTC m=+142.760926851 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.365505 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-fb9l5" podStartSLOduration=122.365484667 podStartE2EDuration="2m2.365484667s" podCreationTimestamp="2025-12-10 15:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:48.18674566 +0000 UTC m=+142.103692287" watchObservedRunningTime="2025-12-10 15:22:48.365484667 +0000 UTC m=+142.282431294" Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.447093 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.447402 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:48.947382091 +0000 UTC m=+142.864328728 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.459089 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" podStartSLOduration=120.45906487 podStartE2EDuration="2m0.45906487s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:48.456489689 +0000 UTC m=+142.373436316" watchObservedRunningTime="2025-12-10 15:22:48.45906487 +0000 UTC m=+142.376011497" Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.520808 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-t4zfg" podStartSLOduration=6.520788983 podStartE2EDuration="6.520788983s" podCreationTimestamp="2025-12-10 15:22:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:48.519427821 +0000 UTC m=+142.436374438" watchObservedRunningTime="2025-12-10 15:22:48.520788983 +0000 UTC m=+142.437735610" Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.555721 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.556055 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.056042765 +0000 UTC m=+142.972989392 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.658144 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.658539 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.15851995 +0000 UTC m=+143.075466577 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.760661 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.762372 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.262356919 +0000 UTC m=+143.179303546 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.829305 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dzj9z"] Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.844886 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr"] Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.862248 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.864908 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5"] Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.867729 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.367693764 +0000 UTC m=+143.284640391 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:48 crc kubenswrapper[4669]: I1210 15:22:48.973245 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:48 crc kubenswrapper[4669]: E1210 15:22:48.993690 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.49365546 +0000 UTC m=+143.410602087 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.030358 4669 generic.go:334] "Generic (PLEG): container finished" podID="439c2c2a-7b67-41a3-8544-8d2362d0db1b" containerID="52bc940a55cc07a005345ce8c140b11f8c02a30fab4d971a6df4d7782dfe6c49" exitCode=0 Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.031428 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" event={"ID":"439c2c2a-7b67-41a3-8544-8d2362d0db1b","Type":"ContainerDied","Data":"52bc940a55cc07a005345ce8c140b11f8c02a30fab4d971a6df4d7782dfe6c49"} Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.065502 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" event={"ID":"a88168e0-0728-4c47-8d89-5ece2fa293b9","Type":"ContainerStarted","Data":"3aa93495eb5b1f976d0d9349ab306d97a9376bef552fc9f3b05c0b1953e5db91"} Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.067430 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qbnt7"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.071192 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.076075 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.076436 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.576417955 +0000 UTC m=+143.493364582 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.091399 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" event={"ID":"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f","Type":"ContainerStarted","Data":"44edde8b2ac20558ada9ce4b28dfc4b87a9fcfe92e54173af233adfbf9864809"} Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.100355 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" event={"ID":"fdaed5ac-0cc4-49de-aa15-33cc2993afd3","Type":"ContainerStarted","Data":"0e11b09ce53bb2ff38a0df65671dd79b301e18a3aa495990d60908b5e6e1e5ca"} Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.104905 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" event={"ID":"31533b6a-0dfa-4429-b7f9-097b52b009e6","Type":"ContainerStarted","Data":"caa6947c11f8b07b74dd97deed8fa1145736465e959828c95b2513db16d2f35f"} Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.105480 4669 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-r2g6m container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.105534 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" podUID="14ff27a3-e946-4b7c-a56c-d7da016d86df" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.112823 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4f5s9" Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.152824 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.152882 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.177928 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.181285 4669 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.681274108 +0000 UTC m=+143.598220735 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.258451 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6c97z"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.270323 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rx9fw"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.279508 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.279944 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.779928282 +0000 UTC m=+143.696874909 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.320270 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf"] Dec 10 15:22:49 crc kubenswrapper[4669]: W1210 15:22:49.325208 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99515d23_15fa_4bae_be9e_c0a2e2c46c89.slice/crio-09467c7343eb0332e2f23a34801141b490815dfd9068c10ea49e05ed76a18afb WatchSource:0}: Error finding container 09467c7343eb0332e2f23a34801141b490815dfd9068c10ea49e05ed76a18afb: Status 404 returned error can't find the container with id 09467c7343eb0332e2f23a34801141b490815dfd9068c10ea49e05ed76a18afb Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.375424 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-r92cx"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.382269 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.382749 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.882735436 +0000 UTC m=+143.799682063 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.414069 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.417089 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.470693 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.483921 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.484304 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:49.98428036 +0000 UTC m=+143.901234707 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.540123 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.543874 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.548293 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-tscn9"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.548369 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-mjz6f"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.555682 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.565784 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-bqznv"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.567964 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.584625 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.585890 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.586414 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.086398357 +0000 UTC m=+144.003344984 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.608904 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.634223 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-g997m"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.687790 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.687996 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.187954851 +0000 UTC m=+144.104901478 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.688391 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.688846 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.188830662 +0000 UTC m=+144.105777289 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.790008 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.790392 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.290378036 +0000 UTC m=+144.207324663 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.811266 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-vzc96"] Dec 10 15:22:49 crc kubenswrapper[4669]: I1210 15:22:49.910012 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:49 crc kubenswrapper[4669]: E1210 15:22:49.910597 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.410580294 +0000 UTC m=+144.327526921 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.013683 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.014265 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.514249179 +0000 UTC m=+144.431195806 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.014560 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.014967 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.514958696 +0000 UTC m=+144.431905323 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.117702 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.118106 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.618089088 +0000 UTC m=+144.535035715 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.123822 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tscn9" event={"ID":"41607592-fbbb-4003-b9eb-b11cbce16627","Type":"ContainerStarted","Data":"0c802bc9a8d8d58a6c77e9fd5d442246bae9de7983612f1ff45f5c237325ca00"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.125499 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" event={"ID":"7599428a-06ae-462c-bb3b-bb6d9ceae3b0","Type":"ContainerStarted","Data":"0b9cb004618c1e8aeacd2c5e5476e3fe1f2e86fac99b313875bb8c6476cb5a6f"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.133833 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" event={"ID":"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c","Type":"ContainerStarted","Data":"8351efaaf0e06283b73b9fcd52256a32b9cb43087ffa37fac813162cf931fae8"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.137952 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" event={"ID":"99515d23-15fa-4bae-be9e-c0a2e2c46c89","Type":"ContainerStarted","Data":"09467c7343eb0332e2f23a34801141b490815dfd9068c10ea49e05ed76a18afb"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.139555 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" event={"ID":"95d741d8-41e2-4b8d-9fcd-b11f972345bf","Type":"ContainerStarted","Data":"e5a686627eb5668f623600eed2191a11fea86d2d907d1f815a8c81dccb714091"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.141804 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" event={"ID":"9aed1358-8ab8-4b48-920c-d4e3377dcf40","Type":"ContainerStarted","Data":"cb04d7c1016e108ef8d2103599f0a09ba9fddc4cd5b0f1df59ca4aef66557b2d"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.142824 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" event={"ID":"09b754f9-20f8-4d4f-ad25-8fed880f53bb","Type":"ContainerStarted","Data":"c35718e44dfd986e47fb0bfc7b6a10c09b1c5fc57f91bf2dca0ab20acf31fab6"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.144241 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" event={"ID":"6b038ccf-005f-4c1b-b9ec-d9db407f528a","Type":"ContainerStarted","Data":"4d11748268260bf4c03d84e82edcb665fd75e3b1025d62d6e95a5df5c41914de"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.144859 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" event={"ID":"724e9f61-b03e-46ab-86d1-b41eaa447b25","Type":"ContainerStarted","Data":"3aefa0e7903ebc3c93851789d0d61c1d81e78bd0f457b434db7c1b96cf6c9afd"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.146186 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" event={"ID":"a40f1577-aae9-4e5c-bfdb-21dd1a00445d","Type":"ContainerStarted","Data":"7714cb8e90ddd01a659f693ed5e441caa247782f18113218282bda139e3f3684"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.152005 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" event={"ID":"a88168e0-0728-4c47-8d89-5ece2fa293b9","Type":"ContainerStarted","Data":"af360676c0f5d8565e7270fc744610842736e7959a8505cc46a0a9c6b6d7ca83"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.168987 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" event={"ID":"3339e1ea-db38-49df-a24e-88b4252274d2","Type":"ContainerStarted","Data":"f63400f27f9f7776f9d538560653665e651d885a545c8080971ee23eddd6a85f"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.174342 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" event={"ID":"e32e3dfe-0229-477b-8e6c-bd40314231ee","Type":"ContainerStarted","Data":"e1b5cfc6d858926c89351f6cba287624c5664f090bb752c8ac351d424844c69e"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.191479 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" podStartSLOduration=124.191462559 podStartE2EDuration="2m4.191462559s" podCreationTimestamp="2025-12-10 15:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:50.191056019 +0000 UTC m=+144.108002646" watchObservedRunningTime="2025-12-10 15:22:50.191462559 +0000 UTC m=+144.108409186" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.195362 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" 
event={"ID":"e94faf4e-1a2e-4322-ac1b-d922b513b73d","Type":"ContainerStarted","Data":"b92d3df0835de9d52c8d2bf498ddb4417c7f607bbd4e1e52095fcf9ab2ed5c63"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.199920 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" event={"ID":"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4","Type":"ContainerStarted","Data":"9d9e86a79c45096638838a421888711282b8e8cb81ad1fc618daac71c88a2595"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.205783 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" event={"ID":"441f1126-5609-431e-bcb2-3e4b6da1b19a","Type":"ContainerStarted","Data":"eb0e341bcb897004711236fb2e5df35a29472eccb60d33930d0a18c23d60d951"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.214117 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" event={"ID":"4eeead7e-1859-447b-a93d-f68fb80a119e","Type":"ContainerStarted","Data":"b988057652fe0500d6a3f5f92b4cde68db4561a1947349358620c8151e98aadd"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.215816 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" event={"ID":"5ab4b3f4-4d2a-45d2-99ad-330e3624f0be","Type":"ContainerStarted","Data":"adc3f53a37fa89b21b970e5b596b7833cb2179027dd846ab07762473feda1501"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.218642 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.221197 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" event={"ID":"11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f","Type":"ContainerStarted","Data":"82941d1daf96d794104c92f9eff986802071600439c001d8e26a192ac5ac6b48"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.222441 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.222482 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.722466959 +0000 UTC m=+144.639413586 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.246860 4669 patch_prober.go:28] interesting pod/console-operator-58897d9998-dzj9z container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.246921 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" podUID="11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.270204 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-854cf" podStartSLOduration=124.270185577 podStartE2EDuration="2m4.270185577s" podCreationTimestamp="2025-12-10 15:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:50.220920272 +0000 UTC m=+144.137866899" watchObservedRunningTime="2025-12-10 15:22:50.270185577 +0000 UTC m=+144.187132204" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.270360 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-95c68" podStartSLOduration=124.270355621 podStartE2EDuration="2m4.270355621s" podCreationTimestamp="2025-12-10 15:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:50.250910467 +0000 UTC m=+144.167857094" watchObservedRunningTime="2025-12-10 15:22:50.270355621 +0000 UTC m=+144.187302248" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.277097 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" event={"ID":"bef917d6-d516-41ca-ab40-e5d138f08a69","Type":"ContainerStarted","Data":"924326d87ac1dda846301018b3a3071952ac80dc8078b1241450d67e22480337"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.300854 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" event={"ID":"fdaed5ac-0cc4-49de-aa15-33cc2993afd3","Type":"ContainerStarted","Data":"af3d42385215a3ba2e3a3b05983f6b519a093e8c47ef731b7d02c3eeb5c0087e"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.304942 4669 generic.go:334] "Generic (PLEG): container finished" podID="372626a6-fd28-4cbb-93e5-e6520b30c3ce" containerID="def5cb7f78a4be270e0f3a951313fe7b1af1b12a74a580e5f91bb235cf683aa3" exitCode=0 Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.305021 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" 
event={"ID":"372626a6-fd28-4cbb-93e5-e6520b30c3ce","Type":"ContainerDied","Data":"def5cb7f78a4be270e0f3a951313fe7b1af1b12a74a580e5f91bb235cf683aa3"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.309450 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-g997m" event={"ID":"4174704b-c801-44b5-9c3b-93dd6d8a3f52","Type":"ContainerStarted","Data":"a8c430e8f6b3f00746c835c0ca6275ccba71354e5780d5f98cc3698749b6a8b4"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.311713 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dsw2s" event={"ID":"7d419e0a-917c-410c-820b-ddfab808a3fe","Type":"ContainerStarted","Data":"7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.314339 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" event={"ID":"68382e07-892e-4adf-a43b-de6b61754e76","Type":"ContainerStarted","Data":"265839796a58aff7e5b85486226f997bc92577c1dc23697b9e2087c15f2a0d83"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.315488 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" event={"ID":"f726edde-37ea-43cd-8c7c-16b1263647d2","Type":"ContainerStarted","Data":"b3ac31086e50530598213e09186cbd6e32b4d0f632b99ca6107c98c673579c0e"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.316532 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" event={"ID":"15b4e0f9-29d2-4e88-8588-45d668e7f1ad","Type":"ContainerStarted","Data":"377c899e91897a107b3487d88e590c6c2f5bff0ef8f6f4bfc49183a5ce0e2cb2"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.319346 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-b6x7l" event={"ID":"dd110da0-eba9-484f-9786-ccb6b7bcf88f","Type":"ContainerStarted","Data":"467682c2c6a2865b57e7416b3d1f0eeebd6666621f26039dbd98ac0abc7ef43e"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.327565 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk" event={"ID":"723f1344-9955-47c5-adca-3c5059f7a61f","Type":"ContainerStarted","Data":"6cdb87a7537f3389dfe10a29ea103061434ccd436dce90abb319ee548f50730c"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.328478 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.329913 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.829896813 +0000 UTC m=+144.746843440 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.338196 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" podStartSLOduration=123.338175221 podStartE2EDuration="2m3.338175221s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:50.296887405 +0000 UTC m=+144.213834032" watchObservedRunningTime="2025-12-10 15:22:50.338175221 +0000 UTC m=+144.255121848" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.338826 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" podStartSLOduration=123.338822236 podStartE2EDuration="2m3.338822236s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:50.324163796 +0000 UTC m=+144.241110423" watchObservedRunningTime="2025-12-10 15:22:50.338822236 +0000 UTC m=+144.255768863" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.366950 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" event={"ID":"31533b6a-0dfa-4429-b7f9-097b52b009e6","Type":"ContainerStarted","Data":"ac7e188264334718fb8c01f2464d4eb4161620346f24a212f1405805db349212"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.383964 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" event={"ID":"d2501fbc-7568-4a6a-8200-465507ac4e49","Type":"ContainerStarted","Data":"189bb2ba37e4cb775b6f96cae66a3a5f812066aa83a25ade42089fb056d2aab1"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.436287 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.438127 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:50.938112366 +0000 UTC m=+144.855058993 (durationBeforeRetry 500ms). 
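
Every MountDevice and TearDown attempt for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 fails with the same message because the kubelet resolves the driver name against an in-memory registry of CSI plugins, and that registry is only populated once the driver pod (csi-hostpathplugin-mjz6f, whose ContainerStarted event appears above) registers itself over the kubelet's plugin-registration socket. Until then the lookup fails and the operation is re-queued rather than retried inline. A minimal Go sketch of that lookup-and-requeue pattern; the types and names here are hypothetical, not kubelet's actual code:

package main

import (
	"fmt"
	"sync"
	"time"
)

// csiRegistry stands in for the kubelet's list of registered CSI drivers.
type csiRegistry struct {
	mu      sync.RWMutex
	drivers map[string]string // driver name -> endpoint
}

func (r *csiRegistry) client(name string) (string, error) {
	r.mu.RLock()
	defer r.mu.RUnlock()
	if ep, ok := r.drivers[name]; ok {
		return ep, nil
	}
	// Same failure mode as the log: the lookup races ahead of registration.
	return "", fmt.Errorf("driver name %s not found in the list of registered CSI drivers", name)
}

func main() {
	reg := &csiRegistry{drivers: map[string]string{}} // nothing registered yet
	if _, err := reg.client("kubevirt.io.hostpath-provisioner"); err != nil {
		// The caller does not spin; it schedules the next attempt, which is why
		// the log says "No retries permitted until <now+500ms>".
		fmt.Printf("%v; retry after %s\n", err, 500*time.Millisecond)
	}
}
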
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.466870 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-5kndl" podStartSLOduration=123.466847782 podStartE2EDuration="2m3.466847782s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:50.466531814 +0000 UTC m=+144.383478441" watchObservedRunningTime="2025-12-10 15:22:50.466847782 +0000 UTC m=+144.383794409" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.484458 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-vzc96" event={"ID":"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a","Type":"ContainerStarted","Data":"9df1886814dc7788d88ed1ba9e167223e0ce15ea73cc53beee8234192471bc42"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.509998 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" event={"ID":"17036398-eaeb-4ce2-9420-57cd9213ecbc","Type":"ContainerStarted","Data":"ad2c903ba1dc71932a9c12c02b4d501f4a4b38d740cf70132c68cebd75b80043"} Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.537145 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-b6x7l" podStartSLOduration=123.537128619 podStartE2EDuration="2m3.537128619s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:50.534925126 +0000 UTC m=+144.451871753" watchObservedRunningTime="2025-12-10 15:22:50.537128619 +0000 UTC m=+144.454075246" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.538717 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.539209 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.039196248 +0000 UTC m=+144.956142865 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.610694 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-dsw2s" podStartSLOduration=123.610670394 podStartE2EDuration="2m3.610670394s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:50.594652612 +0000 UTC m=+144.511599239" watchObservedRunningTime="2025-12-10 15:22:50.610670394 +0000 UTC m=+144.527617021" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.640248 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.644978 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.144958873 +0000 UTC m=+145.061905490 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.742848 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8q5g5" podStartSLOduration=123.742829669 podStartE2EDuration="2m3.742829669s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:50.61257356 +0000 UTC m=+144.529520187" watchObservedRunningTime="2025-12-10 15:22:50.742829669 +0000 UTC m=+144.659776296" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.744311 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h89pm"] Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.745148 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.747818 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.749124 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.249095208 +0000 UTC m=+145.166041835 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.752569 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.766209 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h89pm"] Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.850057 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-catalog-content\") pod \"certified-operators-h89pm\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.850119 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-utilities\") pod \"certified-operators-h89pm\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.850186 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.850209 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q2fx\" (UniqueName: \"kubernetes.io/projected/0f3441be-4b11-4f4a-b072-7ca1894c5f86-kube-api-access-2q2fx\") pod \"certified-operators-h89pm\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.850525 4669 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.350514269 +0000 UTC m=+145.267460896 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.936199 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-lb6d9"] Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.937485 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.940648 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.951042 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.951209 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-catalog-content\") pod \"certified-operators-h89pm\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.951269 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-utilities\") pod \"certified-operators-h89pm\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.951350 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q2fx\" (UniqueName: \"kubernetes.io/projected/0f3441be-4b11-4f4a-b072-7ca1894c5f86-kube-api-access-2q2fx\") pod \"certified-operators-h89pm\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:22:50 crc kubenswrapper[4669]: E1210 15:22:50.951759 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.451741585 +0000 UTC m=+145.368688212 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.952093 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-catalog-content\") pod \"certified-operators-h89pm\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " pod="openshift-marketplace/certified-operators-h89pm"
Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.952337 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-utilities\") pod \"certified-operators-h89pm\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " pod="openshift-marketplace/certified-operators-h89pm"
Dec 10 15:22:50 crc kubenswrapper[4669]: I1210 15:22:50.955849 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lb6d9"]
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.015274 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q2fx\" (UniqueName: \"kubernetes.io/projected/0f3441be-4b11-4f4a-b072-7ca1894c5f86-kube-api-access-2q2fx\") pod \"certified-operators-h89pm\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " pod="openshift-marketplace/certified-operators-h89pm"
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.062037 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-catalog-content\") pod \"community-operators-lb6d9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " pod="openshift-marketplace/community-operators-lb6d9"
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.062095 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqd76\" (UniqueName: \"kubernetes.io/projected/99dd4f24-38ac-4110-a330-19ab7710acd9-kube-api-access-fqd76\") pod \"community-operators-lb6d9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " pod="openshift-marketplace/community-operators-lb6d9"
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.062180 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-utilities\") pod \"community-operators-lb6d9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " pod="openshift-marketplace/community-operators-lb6d9"
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.062231 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.062511 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.562499189 +0000 UTC m=+145.479445816 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.083008 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h89pm"
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.140906 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t5l5h"]
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.142182 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t5l5h"
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.174858 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t5l5h"]
Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.175411 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.175518 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.675502616 +0000 UTC m=+145.592449243 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.175653 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-utilities\") pod \"community-operators-lb6d9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.175683 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.176066 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-utilities\") pod \"community-operators-lb6d9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.176288 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.676280774 +0000 UTC m=+145.593227401 (durationBeforeRetry 500ms). 
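
Note how, interleaved with the CSI retries, the marketplace catalog pods' volumes (emptyDir utilities and catalog-content, projected service-account tokens) go from VerifyControllerAttachedVolume to MountVolume.SetUp succeeded on the first pass. The volume reconciler diffs desired state against actual state each sync and issues one operation per difference, so only the volume whose driver is unregistered keeps looping. Very roughly, with simplified, hypothetical types:

package main

import "fmt"

// reconcile prints one operation per volume that is desired but not mounted,
// and per volume that is mounted but no longer desired.
func reconcile(desired, mounted map[string]bool) {
	for vol := range desired {
		if !mounted[vol] {
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", vol)
		}
	}
	for vol := range mounted {
		if !desired[vol] {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", vol)
		}
	}
}

func main() {
	desired := map[string]bool{"utilities": true, "catalog-content": true, "kube-api-access-fqd76": true}
	mounted := map[string]bool{} // nothing mounted yet on the first pass
	reconcile(desired, mounted)
	// emptyDir and projected mounts then succeed immediately; the CSI PV
	// reappears here every sync until its driver registers.
}
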
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.176449 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-catalog-content\") pod \"community-operators-lb6d9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.176478 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqd76\" (UniqueName: \"kubernetes.io/projected/99dd4f24-38ac-4110-a330-19ab7710acd9-kube-api-access-fqd76\") pod \"community-operators-lb6d9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.177169 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-catalog-content\") pod \"community-operators-lb6d9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.252235 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqd76\" (UniqueName: \"kubernetes.io/projected/99dd4f24-38ac-4110-a330-19ab7710acd9-kube-api-access-fqd76\") pod \"community-operators-lb6d9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.252507 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.333858 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.334130 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-utilities\") pod \"certified-operators-t5l5h\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.334162 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-catalog-content\") pod \"certified-operators-t5l5h\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.334259 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p55pk\" (UniqueName: \"kubernetes.io/projected/8d636e2e-c705-4462-bc33-88f18c5f3aa2-kube-api-access-p55pk\") pod \"certified-operators-t5l5h\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.334520 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.83449433 +0000 UTC m=+145.751440967 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.415971 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.419913 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qcjfk"] Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.421020 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.441603 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-utilities\") pod \"certified-operators-t5l5h\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.441785 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:22:51 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:22:51 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:22:51 crc kubenswrapper[4669]: healthz check failed Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.441815 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.442354 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-utilities\") pod \"certified-operators-t5l5h\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.441647 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm6tx\" (UniqueName: \"kubernetes.io/projected/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-kube-api-access-sm6tx\") pod \"community-operators-qcjfk\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.442469 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-utilities\") pod \"community-operators-qcjfk\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.442496 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-catalog-content\") pod \"certified-operators-t5l5h\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.442516 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-catalog-content\") pod \"community-operators-qcjfk\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.442568 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p55pk\" (UniqueName: 
\"kubernetes.io/projected/8d636e2e-c705-4462-bc33-88f18c5f3aa2-kube-api-access-p55pk\") pod \"certified-operators-t5l5h\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.442669 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.442919 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:51.942910128 +0000 UTC m=+145.859856755 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.443149 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-catalog-content\") pod \"certified-operators-t5l5h\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.540572 4669 patch_prober.go:28] interesting pod/console-operator-58897d9998-dzj9z container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.540615 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" podUID="11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.541056 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-q7jp7" event={"ID":"bef917d6-d516-41ca-ab40-e5d138f08a69","Type":"ContainerStarted","Data":"b2deb73db969b7dc8140578e7048c267ab506a1b234052ab4ffb5e7245fdaa52"} Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.544132 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.544518 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm6tx\" 
(UniqueName: \"kubernetes.io/projected/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-kube-api-access-sm6tx\") pod \"community-operators-qcjfk\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.544543 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-utilities\") pod \"community-operators-qcjfk\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.544564 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-catalog-content\") pod \"community-operators-qcjfk\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.544953 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-catalog-content\") pod \"community-operators-qcjfk\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.545024 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.045007345 +0000 UTC m=+145.961953972 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.545524 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-utilities\") pod \"community-operators-qcjfk\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.641127 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qcjfk"] Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.646323 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.647522 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-10 15:22:52.147508672 +0000 UTC m=+146.064455299 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.748942 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.749412 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.248938312 +0000 UTC m=+146.165884939 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.749438 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.749857 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.249850574 +0000 UTC m=+146.166797201 (durationBeforeRetry 500ms). 
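
The console-operator readiness failures above never get past TCP connect: the kubelet's GET to https://10.217.0.19:8443/readyz dies with "connection refused" because nothing is listening yet. A failed readiness probe only marks the pod unready for service endpoints; unlike a liveness probe it does not restart the container. The same check reduced to its dial, as a sketch:

package main

import (
	"fmt"
	"net"
	"time"
)

// ready reports whether anything accepts a TCP connection at addr.
func ready(addr string) bool {
	conn, err := net.DialTimeout("tcp", addr, time.Second)
	if err != nil {
		fmt.Printf("Probe failed: %v\n", err) // e.g. "connect: connection refused"
		return false
	}
	conn.Close()
	return true
}

func main() {
	ready("10.217.0.19:8443")
}
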
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.852128 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p55pk\" (UniqueName: \"kubernetes.io/projected/8d636e2e-c705-4462-bc33-88f18c5f3aa2-kube-api-access-p55pk\") pod \"certified-operators-t5l5h\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.854384 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.854657 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.354642856 +0000 UTC m=+146.271589483 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.880355 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm6tx\" (UniqueName: \"kubernetes.io/projected/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-kube-api-access-sm6tx\") pod \"community-operators-qcjfk\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:51 crc kubenswrapper[4669]: I1210 15:22:51.960120 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:51 crc kubenswrapper[4669]: E1210 15:22:51.960774 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.460754048 +0000 UTC m=+146.377700675 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.041650 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.061006 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.061473 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.561458482 +0000 UTC m=+146.478405109 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.070740 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.162558 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.163882 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.663868357 +0000 UTC m=+146.580814984 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.265162 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.265847 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.76581797 +0000 UTC m=+146.682764597 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.313507 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:22:52 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:22:52 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:22:52 crc kubenswrapper[4669]: healthz check failed Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.313971 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.369294 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.369704 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.869691719 +0000 UTC m=+146.786638346 (durationBeforeRetry 500ms). 
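
The router startup probe output above ("[-]backend-http failed ... healthz check failed", statuscode 500) is the usual aggregated healthz pattern: run every registered check, render each as [+] or [-], and return 500 if any failed. A sketch of that shape, with hypothetical stand-ins for the router's real checks:

package main

import (
	"fmt"
	"io"
	"net/http"
	"net/http/httptest"
	"strings"
)

type check struct {
	name string
	run  func() error
}

func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		var body strings.Builder
		failed := false
		for _, c := range checks {
			if err := c.run(); err != nil {
				failed = true
				fmt.Fprintf(&body, "[-]%s failed: reason withheld\n", c.name)
			} else {
				fmt.Fprintf(&body, "[+]%s ok\n", c.name)
			}
		}
		if failed {
			body.WriteString("healthz check failed\n")
			w.WriteHeader(http.StatusInternalServerError) // the probe logs "statuscode: 500"
		}
		io.WriteString(w, body.String())
	}
}

func main() {
	h := healthz([]check{
		{"backend-http", func() error { return fmt.Errorf("not ready") }},
		{"has-synced", func() error { return fmt.Errorf("not ready") }},
		{"process-running", func() error { return nil }},
	})
	rec := httptest.NewRecorder()
	h(rec, httptest.NewRequest("GET", "/healthz", nil))
	fmt.Print(rec.Code, "\n", rec.Body.String())
}
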
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.475663 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.475905 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:52.975891074 +0000 UTC m=+146.892837701 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.578649 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.579113 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.079098047 +0000 UTC m=+146.996044664 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.680867 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.681381 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.181360707 +0000 UTC m=+147.098307334 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.791093 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.791934 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.291908306 +0000 UTC m=+147.208854933 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.814394 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" event={"ID":"4eeead7e-1859-447b-a93d-f68fb80a119e","Type":"ContainerStarted","Data":"9d9555b3a4069221b375baf767f77f25f42a64c6fa26050129566d4daa17c928"} Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.879372 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" event={"ID":"7599428a-06ae-462c-bb3b-bb6d9ceae3b0","Type":"ContainerStarted","Data":"08d56a5300da62f8485e2340d9bc1088195c9d73d5764b26e5d33d1486f53230"} Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.896156 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.899389 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.399276049 +0000 UTC m=+147.316222676 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.899698 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:52 crc kubenswrapper[4669]: E1210 15:22:52.900088 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.400061968 +0000 UTC m=+147.317008595 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:52 crc kubenswrapper[4669]: I1210 15:22:52.903646 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk" event={"ID":"723f1344-9955-47c5-adca-3c5059f7a61f","Type":"ContainerStarted","Data":"34d437914b2e4fc0e5ffca6518c25e4d71e50e286c665fc65cebafa4cc2adb8a"} Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.000691 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:53 crc kubenswrapper[4669]: E1210 15:22:53.000937 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.500921725 +0000 UTC m=+147.417868352 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.012084 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" event={"ID":"d2501fbc-7568-4a6a-8200-465507ac4e49","Type":"ContainerStarted","Data":"528c7cc0da17d4583744010e65f0a08a7e842c585dd8faf2c64131e913d0bf26"} Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.015102 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" event={"ID":"17036398-eaeb-4ce2-9420-57cd9213ecbc","Type":"ContainerStarted","Data":"3df449962eb6151e4615a0d5a2f267d43e88817038cefb39b7febf1ff60eb46e"} Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.016084 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-vzc96" event={"ID":"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a","Type":"ContainerStarted","Data":"de4d47dd357d67ade496bd45b3aa52c8ae3e41f8d9a7d9172da812ba345b46a0"} Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.071948 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-brqbq" podStartSLOduration=126.071925689 podStartE2EDuration="2m6.071925689s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
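[Annotation] Every mount/unmount failure in the entries above reduces to one condition: this kubelet has no registered CSI driver named kubevirt.io.hostpath-provisioner, so each MountDevice/TearDownAt attempt for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 is rejected before it ever reaches a plugin. Node-level registrations are mirrored in the cluster's CSINode object, so one way to confirm the missing registration is to list that object's drivers. A minimal sketch, assuming client-go, a kubeconfig at the default location, and the node name "crc" taken from the journal hostname (none of this is part of the log itself):

package main

import (
	"context"
	"fmt"
	"os"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumption: kubeconfig at the default path (~/.kube/config).
	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	cs, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	// CSINode.Spec.Drivers lists the CSI drivers that have completed
	// node-level registration with this kubelet; in the state logged
	// above, kubevirt.io.hostpath-provisioner would be absent.
	node, err := cs.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	for _, d := range node.Spec.Drivers {
		fmt.Println(d.Name)
	}
}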
observedRunningTime="2025-12-10 15:22:53.055527189 +0000 UTC m=+146.972473816" watchObservedRunningTime="2025-12-10 15:22:53.071925689 +0000 UTC m=+146.988872316" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.073929 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h89pm"] Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.115468 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:53 crc kubenswrapper[4669]: E1210 15:22:53.116672 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.616648827 +0000 UTC m=+147.533595464 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.185207 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" event={"ID":"f726edde-37ea-43cd-8c7c-16b1263647d2","Type":"ContainerStarted","Data":"49b4cac78b4493fd6f4f3f3800bb33e928c3d216817da4caacf2648103e06461"} Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.207183 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" podStartSLOduration=126.207159868 podStartE2EDuration="2m6.207159868s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:53.206980124 +0000 UTC m=+147.123926751" watchObservedRunningTime="2025-12-10 15:22:53.207159868 +0000 UTC m=+147.124106495" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.220050 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:53 crc kubenswrapper[4669]: E1210 15:22:53.221935 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.721913339 +0000 UTC m=+147.638859956 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.265345 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" event={"ID":"439c2c2a-7b67-41a3-8544-8d2362d0db1b","Type":"ContainerStarted","Data":"7d6e2c0332374484f27d422e29cc00b79e5b03ed00a6604929ea013b713ab5ec"} Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.312161 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" podStartSLOduration=126.312136383 podStartE2EDuration="2m6.312136383s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:53.31073591 +0000 UTC m=+147.227682537" watchObservedRunningTime="2025-12-10 15:22:53.312136383 +0000 UTC m=+147.229083010" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.315432 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:22:53 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:22:53 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:22:53 crc kubenswrapper[4669]: healthz check failed Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.315490 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.322230 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.322307 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:53 crc kubenswrapper[4669]: E1210 15:22:53.322674 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:53.822662334 +0000 UTC m=+147.739608961 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.323866 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.343830 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" event={"ID":"95d741d8-41e2-4b8d-9fcd-b11f972345bf","Type":"ContainerStarted","Data":"fbbe3a89692450f1cc4b0da3a430601cc7449c4e543cf789a6181c3a519d3ce7"} Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.344922 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.346256 4669 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-qbnt7 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.25:6443/healthz\": dial tcp 10.217.0.25:6443: connect: connection refused" start-of-body= Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.346285 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" podUID="95d741d8-41e2-4b8d-9fcd-b11f972345bf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.25:6443/healthz\": dial tcp 10.217.0.25:6443: connect: connection refused" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.374081 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-vnn4r"] Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.374997 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.380541 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-g997m" event={"ID":"4174704b-c801-44b5-9c3b-93dd6d8a3f52","Type":"ContainerStarted","Data":"da6d8b207f0dc128b323688e8f25396b22709b05718a3fa1ec28d49fcf562329"} Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.381369 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.381463 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" podStartSLOduration=125.381442307 podStartE2EDuration="2m5.381442307s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:53.376448368 +0000 UTC m=+147.293394995" watchObservedRunningTime="2025-12-10 15:22:53.381442307 +0000 UTC m=+147.298388924" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.412132 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" event={"ID":"d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4","Type":"ContainerStarted","Data":"aca3ca4eb7805ddc7760d3bd1e1a01c6402f262538cac3dcc0d78636a0ec747d"} Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.423149 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.423370 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.423437 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.423455 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:53 crc kubenswrapper[4669]: E1210 15:22:53.424251 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-10 15:22:53.924232289 +0000 UTC m=+147.841178916 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.466434 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnn4r"] Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.526336 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-catalog-content\") pod \"redhat-marketplace-vnn4r\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.526445 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcllm\" (UniqueName: \"kubernetes.io/projected/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-kube-api-access-lcllm\") pod \"redhat-marketplace-vnn4r\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.526543 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.526579 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-utilities\") pod \"redhat-marketplace-vnn4r\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.796901 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:53 crc kubenswrapper[4669]: E1210 15:22:53.797189 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:54.29717261 +0000 UTC m=+148.214119237 (durationBeforeRetry 500ms). 
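[Annotation] Each failed volume operation above is re-queued by nestedpendingoperations with "No retries permitted until … (durationBeforeRetry 500ms)": the kubelet retries with a delay rather than spinning. A minimal sketch of that retry-with-backoff pattern using apimachinery's wait package; the 500ms initial delay matches the entries above, while the factor and step count are illustrative assumptions, not values read from this log:

package main

import (
	"errors"
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

func main() {
	// Illustrative parameters; only the 500ms initial delay is taken
	// from the "durationBeforeRetry 500ms" entries above.
	backoff := wait.Backoff{
		Duration: 500 * time.Millisecond,
		Factor:   2.0,
		Steps:    5,
	}
	attempt := 0
	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
		attempt++
		fmt.Printf("attempt %d\n", attempt)
		// Stand-in for MountDevice: keep failing so every retry fires.
		return false, nil
	})
	if errors.Is(err, wait.ErrWaitTimeout) {
		fmt.Println("gave up after", attempt, "attempts")
	}
}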
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.797793 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.806546 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.808637 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.816476 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.825023 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.831981 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" event={"ID":"a40f1577-aae9-4e5c-bfdb-21dd1a00445d","Type":"ContainerStarted","Data":"f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136"}
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.833680 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.834731 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.835348 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-catalog-content\") pod \"redhat-marketplace-vnn4r\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " pod="openshift-marketplace/redhat-marketplace-vnn4r"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.835509 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcllm\" (UniqueName: \"kubernetes.io/projected/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-kube-api-access-lcllm\") pod \"redhat-marketplace-vnn4r\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " pod="openshift-marketplace/redhat-marketplace-vnn4r"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.835695 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-utilities\") pod \"redhat-marketplace-vnn4r\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " pod="openshift-marketplace/redhat-marketplace-vnn4r"
Dec 10 15:22:53 crc kubenswrapper[4669]: E1210 15:22:53.835968 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:54.335935695 +0000 UTC m=+148.252882322 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.836656 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-catalog-content\") pod \"redhat-marketplace-vnn4r\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " pod="openshift-marketplace/redhat-marketplace-vnn4r"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.837272 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-utilities\") pod \"redhat-marketplace-vnn4r\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " pod="openshift-marketplace/redhat-marketplace-vnn4r"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.836763 4669 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6c97z container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body=
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.837532 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.844975 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" event={"ID":"99515d23-15fa-4bae-be9e-c0a2e2c46c89","Type":"ContainerStarted","Data":"ceb67531ab37842da4761d1caa79891501aff1d0a4fd50ca798537641c79ef62"}
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.853495 4669 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jt6sh container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused" start-of-body=
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.853600 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" podUID="6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": dial tcp 10.217.0.37:5443: connect: connection refused"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.861565 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.894247 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" podStartSLOduration=127.894190316 podStartE2EDuration="2m7.894190316s" podCreationTimestamp="2025-12-10 15:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:53.862025898 +0000 UTC m=+147.778972525" watchObservedRunningTime="2025-12-10 15:22:53.894190316 +0000 UTC m=+147.811136943"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.937001 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.938878 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" event={"ID":"441f1126-5609-431e-bcb2-3e4b6da1b19a","Type":"ContainerStarted","Data":"537f7168df8215d9247e79ed80289f41e717f2f66e41590cebb0f9dfa2ef3ea2"}
Dec 10 15:22:53 crc kubenswrapper[4669]: E1210 15:22:53.940529 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:54.440515282 +0000 UTC m=+148.357461899 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.946095 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ssjx9"]
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.947232 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ssjx9"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.956797 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" podStartSLOduration=125.956754109 podStartE2EDuration="2m5.956754109s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:53.935901261 +0000 UTC m=+147.852847888" watchObservedRunningTime="2025-12-10 15:22:53.956754109 +0000 UTC m=+147.873700736"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.981179 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcllm\" (UniqueName: \"kubernetes.io/projected/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-kube-api-access-lcllm\") pod \"redhat-marketplace-vnn4r\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " pod="openshift-marketplace/redhat-marketplace-vnn4r"
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.997142 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" event={"ID":"e94faf4e-1a2e-4322-ac1b-d922b513b73d","Type":"ContainerStarted","Data":"2164cc7f76d3aeceb4b17ab24dba2265ce93f3a8b0787dc21218ce532f6f4446"}
Dec 10 15:22:53 crc kubenswrapper[4669]: I1210 15:22:53.999163 4669 generic.go:334] "Generic (PLEG): container finished" podID="724e9f61-b03e-46ab-86d1-b41eaa447b25" containerID="c16bdf985b726c5c3d1e4c380e028db42b5226d756a05c8cacd6bbce759eb6c8" exitCode=0
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.007045 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" event={"ID":"724e9f61-b03e-46ab-86d1-b41eaa447b25","Type":"ContainerDied","Data":"c16bdf985b726c5c3d1e4c380e028db42b5226d756a05c8cacd6bbce759eb6c8"}
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.024166 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-g997m" podStartSLOduration=126.024149687 podStartE2EDuration="2m6.024149687s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:54.023524943 +0000 UTC m=+147.940471570" watchObservedRunningTime="2025-12-10 15:22:54.024149687 +0000 UTC m=+147.941096314"
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.040850 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.041203 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-catalog-content\") pod \"redhat-marketplace-ssjx9\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " pod="openshift-marketplace/redhat-marketplace-ssjx9"
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.041271 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-utilities\") pod \"redhat-marketplace-ssjx9\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " pod="openshift-marketplace/redhat-marketplace-ssjx9"
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.041325 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsch4\" (UniqueName: \"kubernetes.io/projected/71b77d7f-f74a-4442-a9df-2c36237983a2-kube-api-access-zsch4\") pod \"redhat-marketplace-ssjx9\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " pod="openshift-marketplace/redhat-marketplace-ssjx9"
Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.042509 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:54.542481384 +0000 UTC m=+148.459428011 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.045444 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xmtb8"]
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.046430 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xmtb8"
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.057778 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.120567 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnn4r"
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.121046 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xmtb8"] Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.140261 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-k572g" podStartSLOduration=127.140205877 podStartE2EDuration="2m7.140205877s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:54.099277841 +0000 UTC m=+148.016224468" watchObservedRunningTime="2025-12-10 15:22:54.140205877 +0000 UTC m=+148.057152504" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.143763 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-catalog-content\") pod \"redhat-marketplace-ssjx9\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.143812 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-utilities\") pod \"redhat-operators-xmtb8\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.143911 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-utilities\") pod \"redhat-marketplace-ssjx9\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.144053 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkpq8\" (UniqueName: \"kubernetes.io/projected/1654a8ed-45e1-416b-9082-21c947d03a70-kube-api-access-qkpq8\") pod \"redhat-operators-xmtb8\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.144078 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsch4\" (UniqueName: \"kubernetes.io/projected/71b77d7f-f74a-4442-a9df-2c36237983a2-kube-api-access-zsch4\") pod \"redhat-marketplace-ssjx9\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.144098 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-catalog-content\") pod \"redhat-operators-xmtb8\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.144118 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.147617 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-utilities\") pod \"redhat-marketplace-ssjx9\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.148922 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:54.648906145 +0000 UTC m=+148.565852832 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.149642 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-catalog-content\") pod \"redhat-marketplace-ssjx9\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.151477 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ssjx9"] Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.230429 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsch4\" (UniqueName: \"kubernetes.io/projected/71b77d7f-f74a-4442-a9df-2c36237983a2-kube-api-access-zsch4\") pod \"redhat-marketplace-ssjx9\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.252854 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.253131 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkpq8\" (UniqueName: \"kubernetes.io/projected/1654a8ed-45e1-416b-9082-21c947d03a70-kube-api-access-qkpq8\") pod \"redhat-operators-xmtb8\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.253157 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-catalog-content\") pod \"redhat-operators-xmtb8\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " 
pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.253239 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-utilities\") pod \"redhat-operators-xmtb8\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.253641 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-utilities\") pod \"redhat-operators-xmtb8\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.253707 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:54.753692116 +0000 UTC m=+148.670638743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.254655 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-catalog-content\") pod \"redhat-operators-xmtb8\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.374003 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.374321 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:54.874309275 +0000 UTC m=+148.791255902 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.391152 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkpq8\" (UniqueName: \"kubernetes.io/projected/1654a8ed-45e1-416b-9082-21c947d03a70-kube-api-access-qkpq8\") pod \"redhat-operators-xmtb8\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.474971 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.475877 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:54.975859059 +0000 UTC m=+148.892805676 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.492550 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.493643 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.511488 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" podStartSLOduration=126.511470818 podStartE2EDuration="2m6.511470818s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:54.266513562 +0000 UTC m=+148.183460189" watchObservedRunningTime="2025-12-10 15:22:54.511470818 +0000 UTC m=+148.428417445" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.512022 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-wvzzm" podStartSLOduration=127.512017122 podStartE2EDuration="2m7.512017122s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:54.502043934 +0000 UTC m=+148.418990561" watchObservedRunningTime="2025-12-10 15:22:54.512017122 +0000 UTC m=+148.428963749" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.548179 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:22:54 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:22:54 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:22:54 crc kubenswrapper[4669]: healthz check failed Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.548262 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.552451 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-bqznv" podStartSLOduration=127.552423007 podStartE2EDuration="2m7.552423007s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:54.548837881 +0000 UTC m=+148.465784498" watchObservedRunningTime="2025-12-10 15:22:54.552423007 +0000 UTC m=+148.469369634" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.573616 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lmf56"] Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.574658 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.583071 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.583536 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.083520259 +0000 UTC m=+149.000466886 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.690115 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.690867 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-catalog-content\") pod \"redhat-operators-lmf56\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.690956 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.190941223 +0000 UTC m=+149.107887840 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.691014 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.699108 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg755\" (UniqueName: \"kubernetes.io/projected/e6b094d9-c376-4f11-8c0e-7764c92d1031-kube-api-access-cg755\") pod \"redhat-operators-lmf56\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.699232 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-utilities\") pod \"redhat-operators-lmf56\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.699464 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.199440786 +0000 UTC m=+149.116387413 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.741383 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.741729 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.751597 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.771331 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" podStartSLOduration=127.771310831 podStartE2EDuration="2m7.771310831s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:54.770568822 +0000 UTC m=+148.687515449" watchObservedRunningTime="2025-12-10 15:22:54.771310831 +0000 UTC m=+148.688257458" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.804270 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.804593 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg755\" (UniqueName: \"kubernetes.io/projected/e6b094d9-c376-4f11-8c0e-7764c92d1031-kube-api-access-cg755\") pod \"redhat-operators-lmf56\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.804636 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-utilities\") pod \"redhat-operators-lmf56\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.804710 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-catalog-content\") pod \"redhat-operators-lmf56\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.805299 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-catalog-content\") pod \"redhat-operators-lmf56\" (UID: 
\"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.805394 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.305374143 +0000 UTC m=+149.222320770 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.805946 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-utilities\") pod \"redhat-operators-lmf56\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.850297 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lmf56"] Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.893090 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-f7zxn" podStartSLOduration=126.893065917 podStartE2EDuration="2m6.893065917s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:54.817108984 +0000 UTC m=+148.734055611" watchObservedRunningTime="2025-12-10 15:22:54.893065917 +0000 UTC m=+148.810012544" Dec 10 15:22:54 crc kubenswrapper[4669]: I1210 15:22:54.906393 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:54 crc kubenswrapper[4669]: E1210 15:22:54.977193 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.477170834 +0000 UTC m=+149.394117461 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.004652 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg755\" (UniqueName: \"kubernetes.io/projected/e6b094d9-c376-4f11-8c0e-7764c92d1031-kube-api-access-cg755\") pod \"redhat-operators-lmf56\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.013888 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.014607 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.514587928 +0000 UTC m=+149.431534555 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.054119 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.054159 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.078837 4669 patch_prober.go:28] interesting pod/console-f9d7485db-dsw2s container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.078921 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dsw2s" podUID="7d419e0a-917c-410c-820b-ddfab808a3fe" containerName="console" probeResult="failure" output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.082599 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk" event={"ID":"723f1344-9955-47c5-adca-3c5059f7a61f","Type":"ContainerStarted","Data":"47acc1224fbaac705eed23f29cb9bd60a0eb16da6685394f6c790b131cbdeb6b"} Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.089240 
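The records above capture one full retry cycle of the volume reconciler: every MountVolume.MountDevice attempt for the image-registry PVC and every UnmountVolume.TearDown attempt for the terminated pod 8f668bae-612b-4b75-9490-919e737c6a3b fails with "driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers", and nestedpendingoperations re-queues each operation with a 500ms durationBeforeRetry. A minimal, stdlib-only Go sketch for tallying that churn per volume when reading a log like this one; the program name and the log-path argument are illustrative assumptions, not part of any cluster tooling:

// csiretries.go: tally CSI mount/unmount retry failures per volume in a kubelet log.
// A sketch under stated assumptions, not the kubelet's own tooling.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	if len(os.Args) != 2 {
		fmt.Fprintln(os.Stderr, "usage: csiretries <kubelet.log>")
		os.Exit(1)
	}
	f, err := os.Open(os.Args[1])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	defer f.Close()

	// Matches the nestedpendingoperations failure lines above, e.g.
	//   Error: MountVolume.MountDevice failed for volume "pvc-657094db-..."
	re := regexp.MustCompile(`(MountVolume\.MountDevice|UnmountVolume\.TearDown) failed for volume "([^"]+)"`)
	counts := map[string]int{}
	sc := bufio.NewScanner(f)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // journal lines here run long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]+" "+m[2]]++
		}
	}
	for op, n := range counts {
		fmt.Printf("%6d  %s\n", n, op)
	}
}

Run against this file it would print one count per (operation, volume) pair, making the half-second retry loop on pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 easy to spot among the unrelated records.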
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.089240 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tscn9" event={"ID":"41607592-fbbb-4003-b9eb-b11cbce16627","Type":"ContainerStarted","Data":"bb995554bdf20e49edd559277498245224bbb59ba928887a3467707bd0f96768"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.094316 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-tscn9"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.103143 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.103251 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.116702 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.117174 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.617159975 +0000 UTC m=+149.534106602 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.124064 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-p2s7k" event={"ID":"09b754f9-20f8-4d4f-ad25-8fed880f53bb","Type":"ContainerStarted","Data":"6d9ae6d4f00f42e032ce11b6bd325639551325dfb6703992bcf06f28a59f8aea"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.147288 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-lb6d9"]
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.153176 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-qd9lk" podStartSLOduration=128.153159405 podStartE2EDuration="2m8.153159405s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:55.151511926 +0000 UTC m=+149.068458563" watchObservedRunningTime="2025-12-10 15:22:55.153159405 +0000 UTC m=+149.070106032"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.159128 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" event={"ID":"f726edde-37ea-43cd-8c7c-16b1263647d2","Type":"ContainerStarted","Data":"52b8d89ebb2580125a021805cd9ef76a56652e3bbd838f144141d2ca39b07779"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.191094 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" event={"ID":"4eeead7e-1859-447b-a93d-f68fb80a119e","Type":"ContainerStarted","Data":"08bdec00de23efaddef2bc17efe299d0bdfb1f9d8c5b7a0c9676aff671f565d5"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.217388 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" event={"ID":"7599428a-06ae-462c-bb3b-bb6d9ceae3b0","Type":"ContainerStarted","Data":"3981f2fbc9e75fa74aa341f32a08338c659edc7dc35059b8c84e3b23d1faf972"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.224234 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.225898 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.725873641 +0000 UTC m=+149.642820258 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.246059 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-tscn9" podStartSLOduration=128.246033461 podStartE2EDuration="2m8.246033461s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:55.24259502 +0000 UTC m=+149.159541647" watchObservedRunningTime="2025-12-10 15:22:55.246033461 +0000 UTC m=+149.162980088"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.277635 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lmf56"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.278102 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-vzc96" event={"ID":"fe3c350b-f00c-4f0a-8e65-eec3eaf60f3a","Type":"ContainerStarted","Data":"66ba8a6d0aaf21848dce173eac401811a82fa06b45cef15d7661886eec5f8304"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.278812 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-vzc96"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.278822 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.278907 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.281160 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.281204 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.303466 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 15:22:55 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld
Dec 10 15:22:55 crc kubenswrapper[4669]: [+]process-running ok
Dec 10 15:22:55 crc kubenswrapper[4669]: healthz check failed
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.303546 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.316742 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" event={"ID":"17036398-eaeb-4ce2-9420-57cd9213ecbc","Type":"ContainerStarted","Data":"3d349f6718b5911efe0611433dd2dba005c8c5a421a526d50fa60b7c62cbee58"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.337973 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.340825 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.840810944 +0000 UTC m=+149.757757571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.349178 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-2rh6l" podStartSLOduration=128.349137213 podStartE2EDuration="2m8.349137213s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:55.343067887 +0000 UTC m=+149.260014514" watchObservedRunningTime="2025-12-10 15:22:55.349137213 +0000 UTC m=+149.266083840"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.351329 4669 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6c97z container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body=
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.351382 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.351442 4669 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6c97z container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body=
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.351456 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.354840 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" event={"ID":"9aed1358-8ab8-4b48-920c-d4e3377dcf40","Type":"ContainerStarted","Data":"75b5e2118f3fe64848a5c97e1061e59b82901fa81fb876b52c58c92757c9a928"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.390025 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.435134 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" event={"ID":"6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c","Type":"ContainerStarted","Data":"68edfef2505d4a2023cad6a223ea4bd7330f117163400ac88dedd739b4850a78"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.444992 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.446467 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:55.946445555 +0000 UTC m=+149.863392182 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
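The prober records preserve exactly what each failing probe saw: connection refused from the downloads and marketplace-operator endpoints, and an HTTP 500 with a healthz body from the router. A self-contained Go sketch of the same style of check, useful for replaying a probe by hand while reading a log like this; the default URL and the one-second timeout are assumptions, and the 200-399 success window mirrors what the kubelet prober counts as healthy:

// probecheck.go: issue the kind of HTTP GET the prober lines above record,
// distinguishing a transport error from an unhealthy HTTP status. Illustrative only.
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"time"
)

func main() {
	// Default target is the downloads readiness endpoint seen in the log;
	// pass any other URL as the first argument.
	url := "http://10.217.0.28:8080/"
	if len(os.Args) > 1 {
		url = os.Args[1]
	}
	client := &http.Client{Timeout: 1 * time.Second} // probes enforce a short per-attempt timeout
	resp, err := client.Get(url)
	if err != nil {
		// This is the "dial tcp ...: connect: connection refused" case in the log.
		fmt.Printf("probe failure (no response): %v\n", err)
		return
	}
	defer resp.Body.Close()
	body, _ := io.ReadAll(io.LimitReader(resp.Body, 1024)) // the prober logs only the start of the body
	if resp.StatusCode >= 200 && resp.StatusCode < 400 {
		fmt.Printf("probe success: statuscode %d\n", resp.StatusCode)
	} else {
		// This is the router's "HTTP probe failed with statuscode: 500" case.
		fmt.Printf("probe failure: statuscode %d, start-of-body: %q\n", resp.StatusCode, body)
	}
}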
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.449204 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hdzbf" podStartSLOduration=128.44918091 podStartE2EDuration="2m8.44918091s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:55.410966478 +0000 UTC m=+149.327913105" watchObservedRunningTime="2025-12-10 15:22:55.44918091 +0000 UTC m=+149.366127537"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.460399 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-r92cx" podStartSLOduration=127.460366887 podStartE2EDuration="2m7.460366887s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:55.448041683 +0000 UTC m=+149.364988310" watchObservedRunningTime="2025-12-10 15:22:55.460366887 +0000 UTC m=+149.377313514"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.535698 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" event={"ID":"372626a6-fd28-4cbb-93e5-e6520b30c3ce","Type":"ContainerStarted","Data":"63fcbc920969ac36b51df8887dadd70b658093ec63683b112ebe977597dd4663"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.546842 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.549852 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.049832892 +0000 UTC m=+149.966779529 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.555201 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-q2hb9" podStartSLOduration=128.55515022 podStartE2EDuration="2m8.55515022s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:55.551002781 +0000 UTC m=+149.467949428" watchObservedRunningTime="2025-12-10 15:22:55.55515022 +0000 UTC m=+149.472096847"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.557706 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-zszbg" event={"ID":"e32e3dfe-0229-477b-8e6c-bd40314231ee","Type":"ContainerStarted","Data":"708f96ad821453560ecb4b9223eaeba3a5e0f3a70d551b4777be7faf85de9682"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.609901 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5mc9q" event={"ID":"6b038ccf-005f-4c1b-b9ec-d9db407f528a","Type":"ContainerStarted","Data":"ad9efcaddae394f72ca70b99b89aceaa5c2d95a1d78d4e25cd777be2eb745494"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.647255 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" event={"ID":"15b4e0f9-29d2-4e88-8588-45d668e7f1ad","Type":"ContainerStarted","Data":"48954859dc02b923cc8485afb28ff12ac20385ac5eddee25791c0c0a9f1436d0"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.648394 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.650007 4669 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-9jrwx container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body=
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.650044 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" podUID="15b4e0f9-29d2-4e88-8588-45d668e7f1ad" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/healthz\": dial tcp 10.217.0.12:8443: connect: connection refused"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.654063 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.655317 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.155297689 +0000 UTC m=+150.072244316 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.687759 4669 generic.go:334] "Generic (PLEG): container finished" podID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerID="a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86" exitCode=0
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.688999 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h89pm" event={"ID":"0f3441be-4b11-4f4a-b072-7ca1894c5f86","Type":"ContainerDied","Data":"a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.689038 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h89pm" event={"ID":"0f3441be-4b11-4f4a-b072-7ca1894c5f86","Type":"ContainerStarted","Data":"20c0d8248416e41ba891d2dd034ab3bf39c1525532234ae649b3698a3a0e6eb4"}
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.692416 4669 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6c97z container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body=
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.692461 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.702707 4669 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.717417 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" podStartSLOduration=128.717396472 podStartE2EDuration="2m8.717396472s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:55.637674629 +0000 UTC m=+149.554621256" watchObservedRunningTime="2025-12-10 15:22:55.717396472 +0000 UTC m=+149.634343099"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.753667 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-rx9fw" podStartSLOduration=128.753644277 podStartE2EDuration="2m8.753644277s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:55.72149994 +0000 UTC m=+149.638446567" watchObservedRunningTime="2025-12-10 15:22:55.753644277 +0000 UTC m=+149.670590904"
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.755013 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t5l5h"]
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.756448 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.757818 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.257800187 +0000 UTC m=+150.174746824 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.857395 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.857490 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.357466075 +0000 UTC m=+150.274412702 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.858553 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.862667 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.362652958 +0000 UTC m=+150.279599585 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:55 crc kubenswrapper[4669]: I1210 15:22:55.960986 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:55 crc kubenswrapper[4669]: E1210 15:22:55.961571 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.461545879 +0000 UTC m=+150.378492496 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.084911 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.085537 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.585521729 +0000 UTC m=+150.502468356 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.186659 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.187266 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.687207845 +0000 UTC m=+150.604154472 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
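The "Observed pod startup duration" records interleaved above carry everything needed to check the reported podStartSLOduration: with no image pull observed (both pulling timestamps are the zero time 0001-01-01), the SLO duration is simply watchObservedRunningTime minus podCreationTimestamp. A small Go verification using the two timestamps copied from the ingress-operator record above (the m=+... monotonic suffix is dropped before parsing; purely illustrative arithmetic, not the kubelet's code):

// startuplatency.go: recompute podStartSLOduration from a log line's timestamps.
package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the kubelet's quoted "2025-12-10 15:20:47 +0000 UTC" form;
	// Go's time.Parse accepts the fractional seconds even though the layout omits them.
	const layout = "2006-01-02 15:04:05 -0700 MST"
	created, err := time.Parse(layout, "2025-12-10 15:20:47 +0000 UTC")
	if err != nil {
		panic(err)
	}
	running, err := time.Parse(layout, "2025-12-10 15:22:55.44918091 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints 2m8.44918091s, i.e. the podStartSLOduration=128.44918091 reported
	// for ingress-operator-5b745b69d9-hdzbf in the log above.
	fmt.Println(running.Sub(created))
}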
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.187738 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.188473 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.688466025 +0000 UTC m=+150.605412652 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.263076 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" podStartSLOduration=130.263052345 podStartE2EDuration="2m10.263052345s" podCreationTimestamp="2025-12-10 15:20:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:56.12035652 +0000 UTC m=+150.037303157" watchObservedRunningTime="2025-12-10 15:22:56.263052345 +0000 UTC m=+150.179998972"
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.265039 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-vzc96" podStartSLOduration=14.265029543 podStartE2EDuration="14.265029543s" podCreationTimestamp="2025-12-10 15:22:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:55.779632057 +0000 UTC m=+149.696578684" watchObservedRunningTime="2025-12-10 15:22:56.265029543 +0000 UTC m=+150.181976170"
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.291890 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.292326 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.792307774 +0000 UTC m=+150.709254401 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.295650 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-b6x7l"
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.302491 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 15:22:56 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld
Dec 10 15:22:56 crc kubenswrapper[4669]: [+]process-running ok
Dec 10 15:22:56 crc kubenswrapper[4669]: healthz check failed
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.302556 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.332483 4669 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-qbnt7 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.25:6443/healthz\": context deadline exceeded" start-of-body=
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.332553 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" podUID="95d741d8-41e2-4b8d-9fcd-b11f972345bf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.25:6443/healthz\": context deadline exceeded"
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.349524 4669 patch_prober.go:28] interesting pod/console-operator-58897d9998-dzj9z container/console-operator namespace/openshift-console-operator: Liveness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.350003 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" podUID="11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.353353 4669 patch_prober.go:28] interesting pod/console-operator-58897d9998-dzj9z container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.353502 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" podUID="11b7bdd7-6e4c-46aa-a469-bb2f7acfb68f" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.389186 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" podStartSLOduration=128.389159465 podStartE2EDuration="2m8.389159465s" podCreationTimestamp="2025-12-10 15:20:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:22:56.379875714 +0000 UTC m=+150.296822341" watchObservedRunningTime="2025-12-10 15:22:56.389159465 +0000 UTC m=+150.306106082"
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.394667 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.395110 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.895090757 +0000 UTC m=+150.812037384 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.452156 4669 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jt6sh container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.461337 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" podUID="6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.464765 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qcjfk"]
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.497936 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.498488 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.998447635 +0000 UTC m=+150.915394262 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.498563 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.499124 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:56.99911618 +0000 UTC m=+150.916062807 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.601716 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.602417 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:57.102395225 +0000 UTC m=+151.019341852 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.711072 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.711748 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:57.211727885 +0000 UTC m=+151.128674512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.729826 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5l5h" event={"ID":"8d636e2e-c705-4462-bc33-88f18c5f3aa2","Type":"ContainerStarted","Data":"e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796"}
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.729884 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5l5h" event={"ID":"8d636e2e-c705-4462-bc33-88f18c5f3aa2","Type":"ContainerStarted","Data":"7368bbce2e383e68878d2dc55e232ce50aec7a999ed2ef0297cbddab68c34c0e"}
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.745955 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lb6d9" event={"ID":"99dd4f24-38ac-4110-a330-19ab7710acd9","Type":"ContainerStarted","Data":"054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8"}
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.746003 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lb6d9" event={"ID":"99dd4f24-38ac-4110-a330-19ab7710acd9","Type":"ContainerStarted","Data":"b1b13b5abb3505b4188da76f89fc693a8d2cd6605293280c779dbe5c4a27fe4b"}
Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.780616 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" event={"ID":"68382e07-892e-4adf-a43b-de6b61754e76","Type":"ContainerStarted","Data":"123658e16bd59b1109d8132bb6b609bcf00fb9b8fcaee20571d51c8382d51ae8"}
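The ContainerStarted event for hostpath-provisioner/csi-hostpathplugin-mjz6f just above is the missing piece behind the whole retry loop: once that plugin pod registers its driver with the kubelet, the mount and unmount operations for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 can finally proceed. One way to confirm registration after the fact is to read the node's CSINode object, which lists the drivers registered on that node. A sketch assuming client-go is available, a kubeconfig is pointed to by $KUBECONFIG, and the node name is "crc" as in these logs; not part of the cluster's own tooling:

// csidrivers.go: list the CSI drivers registered on a node via its CSINode object.
package main

import (
	"context"
	"fmt"
	"os"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumes $KUBECONFIG names a kubeconfig with read access to storage.k8s.io.
	config, err := clientcmd.BuildConfigFromFlags("", os.Getenv("KUBECONFIG"))
	if err != nil {
		panic(err)
	}
	cs, err := kubernetes.NewForConfig(config)
	if err != nil {
		panic(err)
	}
	// The CSINode object mirrors the kubelet's per-node driver registrations;
	// "crc" is the node name taken from the log lines above.
	csiNode, err := cs.StorageV1().CSINodes().Get(context.Background(), "crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		fmt.Println(d.Name) // kubevirt.io.hostpath-provisioner should appear once registered
	}
}

Until the driver shows up there (the kubelet also records registrations under /var/lib/kubelet/plugins_registry), the newCsiDriverClient errors keep repeating on the 500ms cadence seen throughout this section.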
event={"ID":"372626a6-fd28-4cbb-93e5-e6520b30c3ce","Type":"ContainerStarted","Data":"19d53b52b5f0785a1d4d45ac55e02cb853ac0c520c12d66c7d6cd7acf42c4dc6"} Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.807056 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" event={"ID":"724e9f61-b03e-46ab-86d1-b41eaa447b25","Type":"ContainerStarted","Data":"4bfdd1967c2f2fb1caed75ce5f0c62b9b155721c42d55f3eb9b093370f87f4ca"} Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.813327 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.813698 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:57.313678158 +0000 UTC m=+151.230624785 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.824048 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcjfk" event={"ID":"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5","Type":"ContainerStarted","Data":"762739c17969b30db736f0142ec8b018fb076b32a4a5a0fbfbb1f19c2a090f93"} Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.830408 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.830486 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.830596 4669 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6c97z container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" start-of-body= Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.830612 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.21:8080/healthz\": dial tcp 10.217.0.21:8080: connect: connection refused" Dec 10 15:22:56 crc kubenswrapper[4669]: 
I1210 15:22:56.867527 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9jrwx" Dec 10 15:22:56 crc kubenswrapper[4669]: I1210 15:22:56.916455 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:56 crc kubenswrapper[4669]: E1210 15:22:56.924875 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:57.424852421 +0000 UTC m=+151.341799228 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.023119 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.024068 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:57.524051399 +0000 UTC m=+151.440998026 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.124644 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.125081 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:57.62506567 +0000 UTC m=+151.542012297 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.225915 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.226531 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:57.726501132 +0000 UTC m=+151.643447759 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.337392 4669 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-qbnt7 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.25:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.337451 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" podUID="95d741d8-41e2-4b8d-9fcd-b11f972345bf" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.25:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.338102 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.338684 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:57.838667709 +0000 UTC m=+151.755614336 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.354396 4669 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jt6sh container/packageserver namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.354564 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" podUID="6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.354617 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:22:57 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:22:57 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:22:57 crc kubenswrapper[4669]: healthz check failed Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.354863 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.440612 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.440941 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:57.940920959 +0000 UTC m=+151.857867586 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.451835 4669 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-t2f75 container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 10 15:22:57 crc kubenswrapper[4669]: [+]log ok Dec 10 15:22:57 crc kubenswrapper[4669]: [+]etcd ok Dec 10 15:22:57 crc kubenswrapper[4669]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 10 15:22:57 crc kubenswrapper[4669]: [-]poststarthook/generic-apiserver-start-informers failed: reason withheld Dec 10 15:22:57 crc kubenswrapper[4669]: [+]poststarthook/max-in-flight-filter ok Dec 10 15:22:57 crc kubenswrapper[4669]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 10 15:22:57 crc kubenswrapper[4669]: [+]poststarthook/openshift.io-StartUserInformer ok Dec 10 15:22:57 crc kubenswrapper[4669]: [+]poststarthook/openshift.io-StartOAuthInformer ok Dec 10 15:22:57 crc kubenswrapper[4669]: [+]poststarthook/openshift.io-StartTokenTimeoutUpdater ok Dec 10 15:22:57 crc kubenswrapper[4669]: livez check failed Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.452334 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" podUID="439c2c2a-7b67-41a3-8544-8d2362d0db1b" containerName="oauth-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.463548 4669 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-jt6sh container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.463645 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" podUID="6c7ab3cf-81fe-49c6-b70c-47e8bd891e8c" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.37:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.542900 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.543419 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-10 15:22:58.043403715 +0000 UTC m=+151.960350342 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.550652 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnn4r"] Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.645248 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.645800 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.145775538 +0000 UTC m=+152.062722165 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.750764 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.751501 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.251477981 +0000 UTC m=+152.168424608 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.852059 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.852723 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.352706358 +0000 UTC m=+152.269652985 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.835337 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.900850 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"3ed1d52a0dfcf8be452da967b4eaf9f82808d601149a50ff8a46f647fcf63939"} Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.931453 4669 generic.go:334] "Generic (PLEG): container finished" podID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerID="d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0" exitCode=0 Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.932615 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jt6sh" Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.932689 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcjfk" event={"ID":"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5","Type":"ContainerDied","Data":"d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0"} Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.960318 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:57 crc kubenswrapper[4669]: E1210 15:22:57.960813 4669 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.460797067 +0000 UTC m=+152.377743694 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.980977 4669 generic.go:334] "Generic (PLEG): container finished" podID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerID="e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796" exitCode=0 Dec 10 15:22:57 crc kubenswrapper[4669]: I1210 15:22:57.981078 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5l5h" event={"ID":"8d636e2e-c705-4462-bc33-88f18c5f3aa2","Type":"ContainerDied","Data":"e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796"} Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.018411 4669 generic.go:334] "Generic (PLEG): container finished" podID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerID="054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8" exitCode=0 Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.018486 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lb6d9" event={"ID":"99dd4f24-38ac-4110-a330-19ab7710acd9","Type":"ContainerDied","Data":"054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8"} Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.036446 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnn4r" event={"ID":"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7","Type":"ContainerStarted","Data":"34d212d714d5c08d5fc57ff2ba17e43805cf3104a6e2e0a63a86f9204fbc1ef0"} Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.036635 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.036672 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.036746 4669 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-vbwnj container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.036763 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" podUID="724e9f61-b03e-46ab-86d1-b41eaa447b25" 
containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.063189 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.067715 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.567679408 +0000 UTC m=+152.484626035 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.166202 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.175114 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.675099052 +0000 UTC m=+152.592045679 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.274764 4669 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-vbwnj container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.275196 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" podUID="724e9f61-b03e-46ab-86d1-b41eaa447b25" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.274794 4669 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-vbwnj container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body= Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.275512 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" podUID="724e9f61-b03e-46ab-86d1-b41eaa447b25" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.275709 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.286592 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.786564923 +0000 UTC m=+152.703511550 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.305005 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:22:58 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:22:58 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:22:58 crc kubenswrapper[4669]: healthz check failed Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.305087 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.389099 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.389566 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.88954971 +0000 UTC m=+152.806496337 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.491701 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.491900 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.991868712 +0000 UTC m=+152.908815339 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.492172 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.492887 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:58.992878877 +0000 UTC m=+152.909825514 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.600665 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.601585 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.101554011 +0000 UTC m=+153.018500638 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.702316 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.702617 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.202605873 +0000 UTC m=+153.119552500 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.703023 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lmf56"] Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.744768 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.744828 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.816670 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.817035 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.317020713 +0000 UTC m=+153.233967340 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.820262 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xmtb8"] Dec 10 15:22:58 crc kubenswrapper[4669]: I1210 15:22:58.919052 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:58 crc kubenswrapper[4669]: E1210 15:22:58.919463 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.419442238 +0000 UTC m=+153.336388865 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.019953 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.021082 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.521057733 +0000 UTC m=+153.438004360 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.054872 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"234fec5a1230385b21ec5b26650768a701736c2bbd762b17bfe8ded7e5af4741"} Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.054934 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2ef3efcae6875c8b34a165318105fd31cc8088f84a58896c5a654dc8abdb9f24"} Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.077843 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmf56" event={"ID":"e6b094d9-c376-4f11-8c0e-7764c92d1031","Type":"ContainerStarted","Data":"6224465508cc0cbed9b1ad57b3695be0ebf2bc5c862d46c3aa83c942f0280b60"} Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.113100 4669 generic.go:334] "Generic (PLEG): container finished" podID="a88168e0-0728-4c47-8d89-5ece2fa293b9" containerID="af360676c0f5d8565e7270fc744610842736e7959a8505cc46a0a9c6b6d7ca83" exitCode=0 Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.113260 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" event={"ID":"a88168e0-0728-4c47-8d89-5ece2fa293b9","Type":"ContainerDied","Data":"af360676c0f5d8565e7270fc744610842736e7959a8505cc46a0a9c6b6d7ca83"} Dec 10 15:22:59 crc kubenswrapper[4669]: W1210 15:22:59.116487 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-4a89051452b32a9b9412389fa573ab41fd0794736c49e75878af1d37ece2adf2 WatchSource:0}: Error finding container 4a89051452b32a9b9412389fa573ab41fd0794736c49e75878af1d37ece2adf2: Status 404 returned error can't find the container with id 4a89051452b32a9b9412389fa573ab41fd0794736c49e75878af1d37ece2adf2 Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.144797 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" event={"ID":"68382e07-892e-4adf-a43b-de6b61754e76","Type":"ContainerStarted","Data":"a6106f0582f9f097575b3419a3e643748a2f1c6b85ffc860f722674a1f8a1d3d"} Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.161762 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.162446 4669 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.662433008 +0000 UTC m=+153.579379635 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.268886 4669 generic.go:334] "Generic (PLEG): container finished" podID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerID="9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e" exitCode=0 Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.269037 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.268877 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ssjx9"] Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.269374 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnn4r" event={"ID":"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7","Type":"ContainerDied","Data":"9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e"} Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.269454 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.769429162 +0000 UTC m=+153.686375789 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.271402 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.289754 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.789729536 +0000 UTC m=+153.706676163 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.302028 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:22:59 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:22:59 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:22:59 crc kubenswrapper[4669]: healthz check failed Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.302104 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.316897 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmtb8" event={"ID":"1654a8ed-45e1-416b-9082-21c947d03a70","Type":"ContainerStarted","Data":"58d3cf3ba9a6a55159b39500405353e87452e2e3e00b6795ec69f2ebed92c3a1"} Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.350578 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1e810421d09d1d29baa2c1cbb5a3648c2509adf4887622088fab027ada05c50d"} Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.350646 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.392805 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.392983 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.89295793 +0000 UTC m=+153.809904557 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.393107 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.394647 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.89463159 +0000 UTC m=+153.811578217 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: W1210 15:22:59.417528 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71b77d7f_f74a_4442_a9df_2c36237983a2.slice/crio-f941591895d45cc6a6c2aec6e9182bdd1df3bd856bc8ffdb9cdf8d9eacbe9332 WatchSource:0}: Error finding container f941591895d45cc6a6c2aec6e9182bdd1df3bd856bc8ffdb9cdf8d9eacbe9332: Status 404 returned error can't find the container with id f941591895d45cc6a6c2aec6e9182bdd1df3bd856bc8ffdb9cdf8d9eacbe9332 Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.494574 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.495858 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:22:59.995837115 +0000 UTC m=+153.912783752 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.634433 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.134401112 +0000 UTC m=+154.051347739 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.634492 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.735525 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.735865 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.235846264 +0000 UTC m=+154.152792891 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.840378 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.840823 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.34081041 +0000 UTC m=+154.257757037 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.843033 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.905531 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-t2f75" Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.957489 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:22:59 crc kubenswrapper[4669]: E1210 15:22:59.958470 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.458440297 +0000 UTC m=+154.375386924 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.958577 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:22:59 crc kubenswrapper[4669]: I1210 15:22:59.962427 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.064088 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:00 crc kubenswrapper[4669]: E1210 15:23:00.065915 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.565898241 +0000 UTC m=+154.482844868 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.166073 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:00 crc kubenswrapper[4669]: E1210 15:23:00.169062 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.669036843 +0000 UTC m=+154.585983470 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.268327 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:23:00 crc kubenswrapper[4669]: E1210 15:23:00.268791 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.768774243 +0000 UTC m=+154.685720870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.313498 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 10 15:23:00 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld
Dec 10 15:23:00 crc kubenswrapper[4669]: [+]process-running ok
Dec 10 15:23:00 crc kubenswrapper[4669]: healthz check failed
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.313568 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.374070 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
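
The router entries above show how an aggregated health endpoint reports: each named sub-check is printed as [+]name ok or [-]name failed, any failure turns the whole healthz verdict into an HTTP 500, and the kubelet's prober records that as a startup-probe failure. Below is a small self-contained sketch of such an endpoint in Go; the handler shape and check functions are assumptions for illustration, not the OpenShift router's actual code.

package main

import (
	"fmt"
	"log"
	"net/http"
)

// check pairs a sub-check name with a function reporting its health.
type check struct {
	name string
	fn   func() error
}

// healthz aggregates named checks into one verdict: any failing check
// yields HTTP 500, and the body lists each check as [+]ok or [-]failed.
func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		body := ""
		failed := false
		for _, c := range checks {
			if err := c.fn(); err != nil {
				failed = true
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
			} else {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			}
		}
		if failed {
			w.WriteHeader(http.StatusInternalServerError) // prober logs "statuscode: 500"
			body += "healthz check failed\n"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	http.HandleFunc("/healthz", healthz([]check{
		{"backend-http", func() error { return fmt.Errorf("backend not ready") }},
		{"has-synced", func() error { return fmt.Errorf("not synced") }},
		{"process-running", func() error { return nil }},
	}))
	log.Fatal(http.ListenAndServe(":8080", nil))
}
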
Dec 10 15:23:00 crc kubenswrapper[4669]: E1210 15:23:00.374165 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.874150429 +0000 UTC m=+154.791097056 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.374363 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
Dec 10 15:23:00 crc kubenswrapper[4669]: E1210 15:23:00.374597 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.874589029 +0000 UTC m=+154.791535656 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.375328 4669 generic.go:334] "Generic (PLEG): container finished" podID="1654a8ed-45e1-416b-9082-21c947d03a70" containerID="e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89" exitCode=0
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.375384 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmtb8" event={"ID":"1654a8ed-45e1-416b-9082-21c947d03a70","Type":"ContainerDied","Data":"e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89"}
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.441189 4669 generic.go:334] "Generic (PLEG): container finished" podID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerID="de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f" exitCode=0
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.443291 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4a89051452b32a9b9412389fa573ab41fd0794736c49e75878af1d37ece2adf2"}
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.443317 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmf56" event={"ID":"e6b094d9-c376-4f11-8c0e-7764c92d1031","Type":"ContainerDied","Data":"de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f"}
Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.476101 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:00 crc kubenswrapper[4669]: E1210 15:23:00.476631 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:00.976612905 +0000 UTC m=+154.893559532 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.625372 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:00 crc kubenswrapper[4669]: E1210 15:23:00.666023 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:01.165989184 +0000 UTC m=+155.082935811 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.722086 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" event={"ID":"68382e07-892e-4adf-a43b-de6b61754e76","Type":"ContainerStarted","Data":"502e2b48360dc66c06b758b49dedf46de55f6d7b8b2d6add876b1ca8f90fcbb0"} Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.730281 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ssjx9" event={"ID":"71b77d7f-f74a-4442-a9df-2c36237983a2","Type":"ContainerStarted","Data":"f941591895d45cc6a6c2aec6e9182bdd1df3bd856bc8ffdb9cdf8d9eacbe9332"} Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.786881 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:00 crc kubenswrapper[4669]: E1210 15:23:00.787331 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-10 15:23:01.287310801 +0000 UTC m=+155.204257428 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:00 crc kubenswrapper[4669]: I1210 15:23:00.940883 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:00 crc kubenswrapper[4669]: E1210 15:23:00.942073 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:01.442056344 +0000 UTC m=+155.359002971 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.044710 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:01 crc kubenswrapper[4669]: E1210 15:23:01.045020 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:01.544996591 +0000 UTC m=+155.461943218 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.271798 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:01 crc kubenswrapper[4669]: E1210 15:23:01.272256 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:01.772236675 +0000 UTC m=+155.689183302 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.305833 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:01 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:01 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:01 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.306024 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.387095 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:01 crc kubenswrapper[4669]: E1210 15:23:01.387989 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:01.887964737 +0000 UTC m=+155.804911364 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.471387 4669 patch_prober.go:28] interesting pod/apiserver-76f77b778f-qhbj9 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 10 15:23:01 crc kubenswrapper[4669]: [+]log ok Dec 10 15:23:01 crc kubenswrapper[4669]: [+]etcd ok Dec 10 15:23:01 crc kubenswrapper[4669]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 10 15:23:01 crc kubenswrapper[4669]: [-]poststarthook/generic-apiserver-start-informers failed: reason withheld Dec 10 15:23:01 crc kubenswrapper[4669]: [+]poststarthook/max-in-flight-filter ok Dec 10 15:23:01 crc kubenswrapper[4669]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 10 15:23:01 crc kubenswrapper[4669]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 10 15:23:01 crc kubenswrapper[4669]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 10 15:23:01 crc kubenswrapper[4669]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 10 15:23:01 crc kubenswrapper[4669]: [-]poststarthook/project.openshift.io-projectcache failed: reason withheld Dec 10 15:23:01 crc kubenswrapper[4669]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 10 15:23:01 crc kubenswrapper[4669]: [-]poststarthook/openshift.io-startinformers failed: reason withheld Dec 10 15:23:01 crc kubenswrapper[4669]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 10 15:23:01 crc kubenswrapper[4669]: [-]poststarthook/quota.openshift.io-clusterquotamapping failed: reason withheld Dec 10 15:23:01 crc kubenswrapper[4669]: livez check failed Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.471824 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" podUID="372626a6-fd28-4cbb-93e5-e6520b30c3ce" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.492150 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:01 crc kubenswrapper[4669]: E1210 15:23:01.492555 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:01.992543342 +0000 UTC m=+155.909489969 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.545198 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.545869 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.554817 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.555143 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.565132 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.593280 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.593540 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/41147672-f2da-4320-9e81-49a03b3698a4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"41147672-f2da-4320-9e81-49a03b3698a4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.593626 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/41147672-f2da-4320-9e81-49a03b3698a4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"41147672-f2da-4320-9e81-49a03b3698a4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:01 crc kubenswrapper[4669]: E1210 15:23:01.593723 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:02.093709928 +0000 UTC m=+156.010656555 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.742054 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/41147672-f2da-4320-9e81-49a03b3698a4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"41147672-f2da-4320-9e81-49a03b3698a4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.742128 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/41147672-f2da-4320-9e81-49a03b3698a4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"41147672-f2da-4320-9e81-49a03b3698a4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.742196 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.748328 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/41147672-f2da-4320-9e81-49a03b3698a4-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"41147672-f2da-4320-9e81-49a03b3698a4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:01 crc kubenswrapper[4669]: E1210 15:23:01.757684 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:02.257664651 +0000 UTC m=+156.174611278 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.843017 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:01 crc kubenswrapper[4669]: E1210 15:23:01.843471 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:02.343455208 +0000 UTC m=+156.260401835 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.864776 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/41147672-f2da-4320-9e81-49a03b3698a4-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"41147672-f2da-4320-9e81-49a03b3698a4\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.890821 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.892248 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-vbwnj" Dec 10 15:23:01 crc kubenswrapper[4669]: I1210 15:23:01.897108 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"387a6ab3e12741142be53246cc2f90ef45808e5fad704cd468a46cd3d5a23f19"} Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:01.937507 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:01.938299 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:01.944738 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:01.945091 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:02.445076364 +0000 UTC m=+156.362022991 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:01.978542 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:01.978747 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:01.979519 4669 generic.go:334] "Generic (PLEG): container finished" podID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerID="89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd" exitCode=0 Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:01.979550 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ssjx9" event={"ID":"71b77d7f-f74a-4442-a9df-2c36237983a2","Type":"ContainerDied","Data":"89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd"} Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:01.989183 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.051566 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.051757 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5df5a0b2-9082-42eb-b5a2-bca588363646-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"5df5a0b2-9082-42eb-b5a2-bca588363646\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.051839 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5df5a0b2-9082-42eb-b5a2-bca588363646-kubelet-dir\") pod 
\"revision-pruner-8-crc\" (UID: \"5df5a0b2-9082-42eb-b5a2-bca588363646\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.052672 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:02.552656171 +0000 UTC m=+156.469602798 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.159576 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5df5a0b2-9082-42eb-b5a2-bca588363646-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"5df5a0b2-9082-42eb-b5a2-bca588363646\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.159866 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.159890 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5df5a0b2-9082-42eb-b5a2-bca588363646-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"5df5a0b2-9082-42eb-b5a2-bca588363646\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.160265 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5df5a0b2-9082-42eb-b5a2-bca588363646-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"5df5a0b2-9082-42eb-b5a2-bca588363646\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.160362 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" podStartSLOduration=20.160336592 podStartE2EDuration="20.160336592s" podCreationTimestamp="2025-12-10 15:22:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:23:02.05804101 +0000 UTC m=+155.974987637" watchObservedRunningTime="2025-12-10 15:23:02.160336592 +0000 UTC m=+156.077283219" Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.160455 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:02.660443244 +0000 UTC m=+156.577389871 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.261266 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.261856 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:02.761830435 +0000 UTC m=+156.678777062 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.322313 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:02 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:02 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:02 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.322387 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.326442 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5df5a0b2-9082-42eb-b5a2-bca588363646-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"5df5a0b2-9082-42eb-b5a2-bca588363646\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.364097 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.364454 4669 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:02.864440243 +0000 UTC m=+156.781386870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.506411 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.506864 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:03.006847263 +0000 UTC m=+156.923793890 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.506987 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.507338 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:03.007331704 +0000 UTC m=+156.924278331 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.508922 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.546844 4669 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.608795 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.609766 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:03.109736948 +0000 UTC m=+157.026683575 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.714873 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.715465 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:03.215453712 +0000 UTC m=+157.132400329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.817491 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.817826 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-10 15:23:03.317794274 +0000 UTC m=+157.234740901 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:02 crc kubenswrapper[4669]: I1210 15:23:02.919388 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:02 crc kubenswrapper[4669]: E1210 15:23:02.919856 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-10 15:23:03.41984058 +0000 UTC m=+157.336787207 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-n4dsm" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.026958 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:03 crc kubenswrapper[4669]: E1210 15:23:03.027179 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-10 15:23:03.527154901 +0000 UTC m=+157.444101528 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.033880 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr"
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.047693 4669 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-10T15:23:02.546873677Z","Handler":null,"Name":""}
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.047962 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" event={"ID":"a88168e0-0728-4c47-8d89-5ece2fa293b9","Type":"ContainerDied","Data":"3aa93495eb5b1f976d0d9349ab306d97a9376bef552fc9f3b05c0b1953e5db91"}
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.048015 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3aa93495eb5b1f976d0d9349ab306d97a9376bef552fc9f3b05c0b1953e5db91"
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.086539 4669 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.086580 4669 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.120149 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-mjz6f" event={"ID":"68382e07-892e-4adf-a43b-de6b61754e76","Type":"ContainerStarted","Data":"60457f59b92b5b2a0bb22d3987f60fe7546016ce1a7cc6e713521eaf49ee31b7"}
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.128783 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a88168e0-0728-4c47-8d89-5ece2fa293b9-secret-volume\") pod \"a88168e0-0728-4c47-8d89-5ece2fa293b9\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") "
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.129015 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a88168e0-0728-4c47-8d89-5ece2fa293b9-config-volume\") pod \"a88168e0-0728-4c47-8d89-5ece2fa293b9\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") "
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.129043 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6p45s\" (UniqueName: \"kubernetes.io/projected/a88168e0-0728-4c47-8d89-5ece2fa293b9-kube-api-access-6p45s\") pod \"a88168e0-0728-4c47-8d89-5ece2fa293b9\" (UID: \"a88168e0-0728-4c47-8d89-5ece2fa293b9\") "
Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.129307 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm"
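
This is the turning point in the excerpt: the registration socket kubevirt.io.hostpath-provisioner-reg.sock appeared under /var/lib/kubelet/plugins_registry (the plugin_watcher entry at 15:23:02.546), RegisterPlugin fires, and csi_plugin.go validates and registers the driver, after which the pending mount finally has a CSI client to talk to. The discovery half of that mechanism is a directory watch. Below is a sketch using fsnotify (assumed dependency github.com/fsnotify/fsnotify); it deliberately omits the registration gRPC handshake the kubelet performs after discovery and is not the kubelet's plugin_watcher.go itself.

package main

import (
	"log"
	"strings"

	"github.com/fsnotify/fsnotify"
)

func main() {
	const registryDir = "/var/lib/kubelet/plugins_registry"

	watcher, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer watcher.Close()

	if err := watcher.Add(registryDir); err != nil {
		log.Fatal(err)
	}

	for event := range watcher.Events {
		// A new *-reg.sock appearing corresponds to the
		// "Adding socket path ... to desired state cache" line above.
		if event.Op&fsnotify.Create != 0 && strings.HasSuffix(event.Name, ".sock") {
			log.Printf("discovered plugin socket: %s", event.Name)
		}
	}
}
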
"a88168e0-0728-4c47-8d89-5ece2fa293b9" (UID: "a88168e0-0728-4c47-8d89-5ece2fa293b9"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.167345 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a88168e0-0728-4c47-8d89-5ece2fa293b9-kube-api-access-6p45s" (OuterVolumeSpecName: "kube-api-access-6p45s") pod "a88168e0-0728-4c47-8d89-5ece2fa293b9" (UID: "a88168e0-0728-4c47-8d89-5ece2fa293b9"). InnerVolumeSpecName "kube-api-access-6p45s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.184236 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a88168e0-0728-4c47-8d89-5ece2fa293b9-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a88168e0-0728-4c47-8d89-5ece2fa293b9" (UID: "a88168e0-0728-4c47-8d89-5ece2fa293b9"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.211847 4669 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.211918 4669 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.230598 4669 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a88168e0-0728-4c47-8d89-5ece2fa293b9-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.230630 4669 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a88168e0-0728-4c47-8d89-5ece2fa293b9-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.230643 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6p45s\" (UniqueName: \"kubernetes.io/projected/a88168e0-0728-4c47-8d89-5ece2fa293b9-kube-api-access-6p45s\") on node \"crc\" DevicePath \"\"" Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.275199 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.310849 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:03 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:03 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:03 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.310916 4669 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.406705 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 10 15:23:03 crc kubenswrapper[4669]: W1210 15:23:03.445073 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod41147672_f2da_4320_9e81_49a03b3698a4.slice/crio-ea0223a094596a5d3cb75dbe4a64c33d79a615c03186e633397b340cef1c4591 WatchSource:0}: Error finding container ea0223a094596a5d3cb75dbe4a64c33d79a615c03186e633397b340cef1c4591: Status 404 returned error can't find the container with id ea0223a094596a5d3cb75dbe4a64c33d79a615c03186e633397b340cef1c4591 Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.505271 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-n4dsm\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:03 crc kubenswrapper[4669]: W1210 15:23:03.535201 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod5df5a0b2_9082_42eb_b5a2_bca588363646.slice/crio-5ac1e25dd2d508d6ac45553155fd3ac43ce2a305ed668ebed0212275b07ede1e WatchSource:0}: Error finding container 5ac1e25dd2d508d6ac45553155fd3ac43ce2a305ed668ebed0212275b07ede1e: Status 404 returned error can't find the container with id 5ac1e25dd2d508d6ac45553155fd3ac43ce2a305ed668ebed0212275b07ede1e Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.538273 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.552132 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 10 15:23:03 crc kubenswrapper[4669]: I1210 15:23:03.711586 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:04 crc kubenswrapper[4669]: I1210 15:23:04.194937 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"41147672-f2da-4320-9e81-49a03b3698a4","Type":"ContainerStarted","Data":"ea0223a094596a5d3cb75dbe4a64c33d79a615c03186e633397b340cef1c4591"} Dec 10 15:23:04 crc kubenswrapper[4669]: I1210 15:23:04.208234 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr" Dec 10 15:23:04 crc kubenswrapper[4669]: I1210 15:23:04.209946 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"5df5a0b2-9082-42eb-b5a2-bca588363646","Type":"ContainerStarted","Data":"5ac1e25dd2d508d6ac45553155fd3ac43ce2a305ed668ebed0212275b07ede1e"} Dec 10 15:23:04 crc kubenswrapper[4669]: I1210 15:23:04.308931 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:04 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:04 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:04 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:04 crc kubenswrapper[4669]: I1210 15:23:04.309068 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:04 crc kubenswrapper[4669]: I1210 15:23:04.421620 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 10 15:23:04 crc kubenswrapper[4669]: I1210 15:23:04.462575 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-vzc96" Dec 10 15:23:04 crc kubenswrapper[4669]: I1210 15:23:04.980801 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:23:04 crc kubenswrapper[4669]: I1210 15:23:04.991058 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-qhbj9" Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.023119 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n4dsm"] Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.070890 4669 patch_prober.go:28] interesting pod/console-f9d7485db-dsw2s container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.071029 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dsw2s" podUID="7d419e0a-917c-410c-820b-ddfab808a3fe" containerName="console" probeResult="failure" output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.279844 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.279909 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get 
\"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.280170 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.280291 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.301619 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:05 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:05 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:05 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.301682 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.349112 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-dzj9z" Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.356076 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.745847 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" event={"ID":"20977ed7-6db6-43e9-95a6-95280e2d8814","Type":"ContainerStarted","Data":"89f6c1e88a2088b55eaef38ca9a533acaf1914b7d673304ae158adcaf51ce8db"} Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.750707 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"41147672-f2da-4320-9e81-49a03b3698a4","Type":"ContainerStarted","Data":"e98fb7c2ba6dcc3ba2575db4efce8714f33174e218a6553fb6fb712bc229879e"} Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.773365 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.773345627 podStartE2EDuration="4.773345627s" podCreationTimestamp="2025-12-10 15:23:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:23:05.772539867 +0000 UTC m=+159.689486494" watchObservedRunningTime="2025-12-10 15:23:05.773345627 +0000 UTC m=+159.690292244" Dec 10 15:23:05 crc kubenswrapper[4669]: I1210 15:23:05.793724 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"5df5a0b2-9082-42eb-b5a2-bca588363646","Type":"ContainerStarted","Data":"1d3f908ef9685723c277316cdc0c19754d66ad7607654486223468101e84ad3c"} Dec 10 15:23:06 crc kubenswrapper[4669]: I1210 15:23:06.298520 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:06 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:06 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:06 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:06 crc kubenswrapper[4669]: I1210 15:23:06.299040 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:06 crc kubenswrapper[4669]: I1210 15:23:06.883424 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" event={"ID":"20977ed7-6db6-43e9-95a6-95280e2d8814","Type":"ContainerStarted","Data":"ddab4cf4bf2128ae7eeb6ccda515bf0d7c2cff1a9c8e3f05fbfdae040744ce9f"} Dec 10 15:23:06 crc kubenswrapper[4669]: I1210 15:23:06.883507 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:06 crc kubenswrapper[4669]: I1210 15:23:06.918287 4669 generic.go:334] "Generic (PLEG): container finished" podID="41147672-f2da-4320-9e81-49a03b3698a4" containerID="e98fb7c2ba6dcc3ba2575db4efce8714f33174e218a6553fb6fb712bc229879e" exitCode=0 Dec 10 15:23:06 crc kubenswrapper[4669]: I1210 15:23:06.918393 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"41147672-f2da-4320-9e81-49a03b3698a4","Type":"ContainerDied","Data":"e98fb7c2ba6dcc3ba2575db4efce8714f33174e218a6553fb6fb712bc229879e"} Dec 10 15:23:06 crc kubenswrapper[4669]: I1210 15:23:06.927379 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" podStartSLOduration=139.927361881 podStartE2EDuration="2m19.927361881s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:23:06.920140279 +0000 UTC m=+160.837086926" watchObservedRunningTime="2025-12-10 15:23:06.927361881 +0000 UTC m=+160.844308508" Dec 10 15:23:06 crc kubenswrapper[4669]: I1210 15:23:06.971757 4669 generic.go:334] "Generic (PLEG): container finished" podID="5df5a0b2-9082-42eb-b5a2-bca588363646" containerID="1d3f908ef9685723c277316cdc0c19754d66ad7607654486223468101e84ad3c" exitCode=0 Dec 10 15:23:06 crc kubenswrapper[4669]: I1210 15:23:06.971828 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"5df5a0b2-9082-42eb-b5a2-bca588363646","Type":"ContainerDied","Data":"1d3f908ef9685723c277316cdc0c19754d66ad7607654486223468101e84ad3c"} Dec 10 15:23:07 crc kubenswrapper[4669]: I1210 15:23:07.306053 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http 
failed: reason withheld Dec 10 15:23:07 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:07 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:07 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:07 crc kubenswrapper[4669]: I1210 15:23:07.306166 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:07 crc kubenswrapper[4669]: I1210 15:23:07.614655 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:07 crc kubenswrapper[4669]: I1210 15:23:07.662057 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5df5a0b2-9082-42eb-b5a2-bca588363646-kubelet-dir\") pod \"5df5a0b2-9082-42eb-b5a2-bca588363646\" (UID: \"5df5a0b2-9082-42eb-b5a2-bca588363646\") " Dec 10 15:23:07 crc kubenswrapper[4669]: I1210 15:23:07.662121 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5df5a0b2-9082-42eb-b5a2-bca588363646-kube-api-access\") pod \"5df5a0b2-9082-42eb-b5a2-bca588363646\" (UID: \"5df5a0b2-9082-42eb-b5a2-bca588363646\") " Dec 10 15:23:07 crc kubenswrapper[4669]: I1210 15:23:07.663986 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5df5a0b2-9082-42eb-b5a2-bca588363646-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5df5a0b2-9082-42eb-b5a2-bca588363646" (UID: "5df5a0b2-9082-42eb-b5a2-bca588363646"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:23:07 crc kubenswrapper[4669]: I1210 15:23:07.689614 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5df5a0b2-9082-42eb-b5a2-bca588363646-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5df5a0b2-9082-42eb-b5a2-bca588363646" (UID: "5df5a0b2-9082-42eb-b5a2-bca588363646"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:23:07 crc kubenswrapper[4669]: I1210 15:23:07.771661 4669 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5df5a0b2-9082-42eb-b5a2-bca588363646-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:23:07 crc kubenswrapper[4669]: I1210 15:23:07.771713 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5df5a0b2-9082-42eb-b5a2-bca588363646-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.008510 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.019837 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"5df5a0b2-9082-42eb-b5a2-bca588363646","Type":"ContainerDied","Data":"5ac1e25dd2d508d6ac45553155fd3ac43ce2a305ed668ebed0212275b07ede1e"} Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.019918 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ac1e25dd2d508d6ac45553155fd3ac43ce2a305ed668ebed0212275b07ede1e" Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.303005 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:08 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:08 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:08 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.303097 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.807689 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.813282 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/41147672-f2da-4320-9e81-49a03b3698a4-kube-api-access\") pod \"41147672-f2da-4320-9e81-49a03b3698a4\" (UID: \"41147672-f2da-4320-9e81-49a03b3698a4\") " Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.813442 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/41147672-f2da-4320-9e81-49a03b3698a4-kubelet-dir\") pod \"41147672-f2da-4320-9e81-49a03b3698a4\" (UID: \"41147672-f2da-4320-9e81-49a03b3698a4\") " Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.813799 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/41147672-f2da-4320-9e81-49a03b3698a4-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "41147672-f2da-4320-9e81-49a03b3698a4" (UID: "41147672-f2da-4320-9e81-49a03b3698a4"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.819692 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41147672-f2da-4320-9e81-49a03b3698a4-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "41147672-f2da-4320-9e81-49a03b3698a4" (UID: "41147672-f2da-4320-9e81-49a03b3698a4"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.916599 4669 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/41147672-f2da-4320-9e81-49a03b3698a4-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:23:08 crc kubenswrapper[4669]: I1210 15:23:08.916637 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/41147672-f2da-4320-9e81-49a03b3698a4-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 15:23:09 crc kubenswrapper[4669]: I1210 15:23:09.104180 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"41147672-f2da-4320-9e81-49a03b3698a4","Type":"ContainerDied","Data":"ea0223a094596a5d3cb75dbe4a64c33d79a615c03186e633397b340cef1c4591"} Dec 10 15:23:09 crc kubenswrapper[4669]: I1210 15:23:09.104242 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ea0223a094596a5d3cb75dbe4a64c33d79a615c03186e633397b340cef1c4591" Dec 10 15:23:09 crc kubenswrapper[4669]: I1210 15:23:09.104260 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 10 15:23:09 crc kubenswrapper[4669]: I1210 15:23:09.297678 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:09 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:09 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:09 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:09 crc kubenswrapper[4669]: I1210 15:23:09.297736 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:10 crc kubenswrapper[4669]: I1210 15:23:10.302051 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:10 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:10 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:10 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:10 crc kubenswrapper[4669]: I1210 15:23:10.302113 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:10 crc kubenswrapper[4669]: I1210 15:23:10.751755 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:23:10 crc kubenswrapper[4669]: I1210 15:23:10.780543 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/a72cfbf6-e882-4e1e-8809-b6735aae5dfe-metrics-certs\") pod \"network-metrics-daemon-rz9mm\" (UID: \"a72cfbf6-e882-4e1e-8809-b6735aae5dfe\") " pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:23:11 crc kubenswrapper[4669]: I1210 15:23:11.025579 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rz9mm" Dec 10 15:23:11 crc kubenswrapper[4669]: I1210 15:23:11.302816 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:11 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:11 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:11 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:11 crc kubenswrapper[4669]: I1210 15:23:11.302911 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:11 crc kubenswrapper[4669]: I1210 15:23:11.964199 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rz9mm"] Dec 10 15:23:12 crc kubenswrapper[4669]: W1210 15:23:12.018575 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda72cfbf6_e882_4e1e_8809_b6735aae5dfe.slice/crio-462cbf1220f1f654049cbf419dc93f7a721c575cdeff0b7d67a4bb31c8f80c7f WatchSource:0}: Error finding container 462cbf1220f1f654049cbf419dc93f7a721c575cdeff0b7d67a4bb31c8f80c7f: Status 404 returned error can't find the container with id 462cbf1220f1f654049cbf419dc93f7a721c575cdeff0b7d67a4bb31c8f80c7f Dec 10 15:23:12 crc kubenswrapper[4669]: I1210 15:23:12.195804 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" event={"ID":"a72cfbf6-e882-4e1e-8809-b6735aae5dfe","Type":"ContainerStarted","Data":"462cbf1220f1f654049cbf419dc93f7a721c575cdeff0b7d67a4bb31c8f80c7f"} Dec 10 15:23:12 crc kubenswrapper[4669]: I1210 15:23:12.342794 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:12 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:12 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:12 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:12 crc kubenswrapper[4669]: I1210 15:23:12.342883 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:13 crc kubenswrapper[4669]: I1210 15:23:13.227881 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" event={"ID":"a72cfbf6-e882-4e1e-8809-b6735aae5dfe","Type":"ContainerStarted","Data":"3c196a33b789d3714a7bd37f37bacd6d3a12c528c987290336b0f43de3db91de"} Dec 10 15:23:13 crc kubenswrapper[4669]: I1210 15:23:13.297510 4669 patch_prober.go:28] interesting 
pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:13 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:13 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:13 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:13 crc kubenswrapper[4669]: I1210 15:23:13.297592 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:14 crc kubenswrapper[4669]: I1210 15:23:14.296349 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:14 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:14 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:14 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:14 crc kubenswrapper[4669]: I1210 15:23:14.296443 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.051778 4669 patch_prober.go:28] interesting pod/console-f9d7485db-dsw2s container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.052147 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dsw2s" podUID="7d419e0a-917c-410c-820b-ddfab808a3fe" containerName="console" probeResult="failure" output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.275882 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.275943 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.275988 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-tscn9" Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.276132 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.276192 4669 prober.go:107] "Probe 
failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.276593 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"bb995554bdf20e49edd559277498245224bbb59ba928887a3467707bd0f96768"} pod="openshift-console/downloads-7954f5f757-tscn9" containerMessage="Container download-server failed liveness probe, will be restarted" Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.276675 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" containerID="cri-o://bb995554bdf20e49edd559277498245224bbb59ba928887a3467707bd0f96768" gracePeriod=2 Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.276902 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.276996 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.281433 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rz9mm" event={"ID":"a72cfbf6-e882-4e1e-8809-b6735aae5dfe","Type":"ContainerStarted","Data":"ef35f41e08d4df5a4024cea5f7535adf58e1c221c940ee1edf91b68f43d64bf0"} Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.298797 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:15 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:15 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:15 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.298873 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:15 crc kubenswrapper[4669]: I1210 15:23:15.311668 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-rz9mm" podStartSLOduration=148.311647795 podStartE2EDuration="2m28.311647795s" podCreationTimestamp="2025-12-10 15:20:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:23:15.310450847 +0000 UTC m=+169.227397484" watchObservedRunningTime="2025-12-10 15:23:15.311647795 +0000 UTC m=+169.228594422" Dec 10 15:23:16 crc kubenswrapper[4669]: I1210 15:23:16.305674 4669 
patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:16 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:16 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:16 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:16 crc kubenswrapper[4669]: I1210 15:23:16.305740 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:17 crc kubenswrapper[4669]: I1210 15:23:17.296873 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:17 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:17 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:17 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:17 crc kubenswrapper[4669]: I1210 15:23:17.296949 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:17 crc kubenswrapper[4669]: I1210 15:23:17.330039 4669 generic.go:334] "Generic (PLEG): container finished" podID="41607592-fbbb-4003-b9eb-b11cbce16627" containerID="bb995554bdf20e49edd559277498245224bbb59ba928887a3467707bd0f96768" exitCode=0 Dec 10 15:23:17 crc kubenswrapper[4669]: I1210 15:23:17.330096 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tscn9" event={"ID":"41607592-fbbb-4003-b9eb-b11cbce16627","Type":"ContainerDied","Data":"bb995554bdf20e49edd559277498245224bbb59ba928887a3467707bd0f96768"} Dec 10 15:23:18 crc kubenswrapper[4669]: I1210 15:23:18.296849 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:18 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:18 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:18 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:18 crc kubenswrapper[4669]: I1210 15:23:18.297630 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:19 crc kubenswrapper[4669]: I1210 15:23:19.296736 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:19 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:19 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:19 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:19 crc kubenswrapper[4669]: 
I1210 15:23:19.296798 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:20 crc kubenswrapper[4669]: I1210 15:23:20.307955 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:20 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:20 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:20 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:20 crc kubenswrapper[4669]: I1210 15:23:20.308028 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:21 crc kubenswrapper[4669]: I1210 15:23:21.297112 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:21 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:21 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:21 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:21 crc kubenswrapper[4669]: I1210 15:23:21.297516 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:22 crc kubenswrapper[4669]: I1210 15:23:22.295347 4669 patch_prober.go:28] interesting pod/router-default-5444994796-b6x7l container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 10 15:23:22 crc kubenswrapper[4669]: [-]has-synced failed: reason withheld Dec 10 15:23:22 crc kubenswrapper[4669]: [+]process-running ok Dec 10 15:23:22 crc kubenswrapper[4669]: healthz check failed Dec 10 15:23:22 crc kubenswrapper[4669]: I1210 15:23:22.295402 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-b6x7l" podUID="dd110da0-eba9-484f-9786-ccb6b7bcf88f" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:23:23 crc kubenswrapper[4669]: I1210 15:23:23.296427 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:23:23 crc kubenswrapper[4669]: I1210 15:23:23.299286 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-b6x7l" Dec 10 15:23:23 crc kubenswrapper[4669]: I1210 15:23:23.747372 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:23:24 crc kubenswrapper[4669]: I1210 15:23:24.699145 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-vxbnr" Dec 10 15:23:25 crc kubenswrapper[4669]: I1210 15:23:25.053420 4669 patch_prober.go:28] interesting pod/console-f9d7485db-dsw2s container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" start-of-body= Dec 10 15:23:25 crc kubenswrapper[4669]: I1210 15:23:25.053743 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dsw2s" podUID="7d419e0a-917c-410c-820b-ddfab808a3fe" containerName="console" probeResult="failure" output="Get \"https://10.217.0.39:8443/health\": dial tcp 10.217.0.39:8443: connect: connection refused" Dec 10 15:23:25 crc kubenswrapper[4669]: I1210 15:23:25.276892 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:25 crc kubenswrapper[4669]: I1210 15:23:25.276965 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:28 crc kubenswrapper[4669]: I1210 15:23:28.744975 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:23:28 crc kubenswrapper[4669]: I1210 15:23:28.745646 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:23:33 crc kubenswrapper[4669]: I1210 15:23:33.829293 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 10 15:23:35 crc kubenswrapper[4669]: I1210 15:23:35.055694 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:23:35 crc kubenswrapper[4669]: I1210 15:23:35.060879 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:23:35 crc kubenswrapper[4669]: I1210 15:23:35.277037 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:35 crc kubenswrapper[4669]: I1210 15:23:35.277112 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.609117 4669 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 10 15:23:36 crc kubenswrapper[4669]: E1210 15:23:36.609467 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df5a0b2-9082-42eb-b5a2-bca588363646" containerName="pruner" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.609484 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df5a0b2-9082-42eb-b5a2-bca588363646" containerName="pruner" Dec 10 15:23:36 crc kubenswrapper[4669]: E1210 15:23:36.609500 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41147672-f2da-4320-9e81-49a03b3698a4" containerName="pruner" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.609507 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="41147672-f2da-4320-9e81-49a03b3698a4" containerName="pruner" Dec 10 15:23:36 crc kubenswrapper[4669]: E1210 15:23:36.609521 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a88168e0-0728-4c47-8d89-5ece2fa293b9" containerName="collect-profiles" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.609527 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="a88168e0-0728-4c47-8d89-5ece2fa293b9" containerName="collect-profiles" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.609622 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="5df5a0b2-9082-42eb-b5a2-bca588363646" containerName="pruner" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.609632 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="41147672-f2da-4320-9e81-49a03b3698a4" containerName="pruner" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.609641 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="a88168e0-0728-4c47-8d89-5ece2fa293b9" containerName="collect-profiles" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.610022 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.617694 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.618896 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.668591 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.670127 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.670265 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.771528 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.771616 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.771685 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.824360 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:36 crc kubenswrapper[4669]: I1210 15:23:36.935108 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:41 crc kubenswrapper[4669]: I1210 15:23:41.619325 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 10 15:23:41 crc kubenswrapper[4669]: I1210 15:23:41.620817 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:41 crc kubenswrapper[4669]: I1210 15:23:41.625254 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 10 15:23:41 crc kubenswrapper[4669]: I1210 15:23:41.916649 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-kubelet-dir\") pod \"installer-9-crc\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:41 crc kubenswrapper[4669]: I1210 15:23:41.916704 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-var-lock\") pod \"installer-9-crc\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:41 crc kubenswrapper[4669]: I1210 15:23:41.916736 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28c45c59-6d2c-4162-9dd0-40cef9280420-kube-api-access\") pod \"installer-9-crc\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:42 crc kubenswrapper[4669]: I1210 15:23:42.018827 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-kubelet-dir\") pod \"installer-9-crc\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:42 crc kubenswrapper[4669]: I1210 15:23:42.018967 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-var-lock\") pod \"installer-9-crc\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:42 crc kubenswrapper[4669]: I1210 15:23:42.019006 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-kubelet-dir\") pod \"installer-9-crc\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:42 crc kubenswrapper[4669]: I1210 15:23:42.019052 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28c45c59-6d2c-4162-9dd0-40cef9280420-kube-api-access\") pod \"installer-9-crc\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:42 crc kubenswrapper[4669]: I1210 15:23:42.019088 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-var-lock\") pod \"installer-9-crc\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:42 crc kubenswrapper[4669]: I1210 15:23:42.040956 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28c45c59-6d2c-4162-9dd0-40cef9280420-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"28c45c59-6d2c-4162-9dd0-40cef9280420\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:42 crc kubenswrapper[4669]: I1210 15:23:42.253712 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:23:45 crc kubenswrapper[4669]: I1210 15:23:45.276052 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:45 crc kubenswrapper[4669]: I1210 15:23:45.277089 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:48 crc kubenswrapper[4669]: E1210 15:23:48.217279 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 10 15:23:48 crc kubenswrapper[4669]: E1210 15:23:48.217846 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cg755,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-lmf56_openshift-marketplace(e6b094d9-c376-4f11-8c0e-7764c92d1031): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 15:23:48 crc kubenswrapper[4669]: E1210 15:23:48.219604 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-lmf56" 
podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" Dec 10 15:23:50 crc kubenswrapper[4669]: E1210 15:23:50.302193 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-lmf56" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" Dec 10 15:23:50 crc kubenswrapper[4669]: E1210 15:23:50.384586 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 10 15:23:50 crc kubenswrapper[4669]: E1210 15:23:50.385047 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2q2fx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-h89pm_openshift-marketplace(0f3441be-4b11-4f4a-b072-7ca1894c5f86): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 15:23:50 crc kubenswrapper[4669]: E1210 15:23:50.388153 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-h89pm" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" Dec 10 15:23:50 crc kubenswrapper[4669]: E1210 15:23:50.409566 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 10 15:23:50 crc kubenswrapper[4669]: E1210 15:23:50.409775 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p55pk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-t5l5h_openshift-marketplace(8d636e2e-c705-4462-bc33-88f18c5f3aa2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 15:23:50 crc kubenswrapper[4669]: E1210 15:23:50.411345 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-t5l5h" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.609480 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-h89pm" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.609937 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-t5l5h" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.674265 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.674441 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zsch4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-ssjx9_openshift-marketplace(71b77d7f-f74a-4442-a9df-2c36237983a2): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.675938 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-ssjx9" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.703105 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.703325 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lcllm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-vnn4r_openshift-marketplace(0c2d3166-9bdb-4d65-8c41-676fc90c8bb7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.705771 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-vnn4r" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.755894 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.756080 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qkpq8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-xmtb8_openshift-marketplace(1654a8ed-45e1-416b-9082-21c947d03a70): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 15:23:51 crc kubenswrapper[4669]: E1210 15:23:51.757492 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-xmtb8" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" Dec 10 15:23:53 crc kubenswrapper[4669]: E1210 15:23:53.339412 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-vnn4r" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" Dec 10 15:23:53 crc kubenswrapper[4669]: E1210 15:23:53.340065 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-xmtb8" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" Dec 10 15:23:53 crc kubenswrapper[4669]: E1210 15:23:53.340197 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-ssjx9" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" Dec 10 15:23:53 crc kubenswrapper[4669]: E1210 15:23:53.446918 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 10 15:23:53 crc kubenswrapper[4669]: E1210 15:23:53.447454 4669 kuberuntime_manager.go:1274] "Unhandled Error" 
err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fqd76,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-lb6d9_openshift-marketplace(99dd4f24-38ac-4110-a330-19ab7710acd9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 15:23:53 crc kubenswrapper[4669]: E1210 15:23:53.453289 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-lb6d9" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" Dec 10 15:23:53 crc kubenswrapper[4669]: E1210 15:23:53.521407 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 10 15:23:53 crc kubenswrapper[4669]: E1210 15:23:53.521570 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-sm6tx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-qcjfk_openshift-marketplace(04c10baa-4c4e-4359-a93e-c76d6f5e1cb5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 10 15:23:53 crc kubenswrapper[4669]: E1210 15:23:53.522775 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-qcjfk" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" Dec 10 15:23:53 crc kubenswrapper[4669]: I1210 15:23:53.889248 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 10 15:23:53 crc kubenswrapper[4669]: W1210 15:23:53.896951 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod28c45c59_6d2c_4162_9dd0_40cef9280420.slice/crio-86f8e2b81a816025d874c0d7fd640c8283024dcb3b7e76faba4836030b4f63ef WatchSource:0}: Error finding container 86f8e2b81a816025d874c0d7fd640c8283024dcb3b7e76faba4836030b4f63ef: Status 404 returned error can't find the container with id 86f8e2b81a816025d874c0d7fd640c8283024dcb3b7e76faba4836030b4f63ef Dec 10 15:23:53 crc kubenswrapper[4669]: I1210 15:23:53.993895 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 10 15:23:54 crc kubenswrapper[4669]: I1210 15:23:54.023428 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-tscn9" event={"ID":"41607592-fbbb-4003-b9eb-b11cbce16627","Type":"ContainerStarted","Data":"3099d99fa9fdde80ecc350c1edd0e46e5e1a02e77a7108997a86399ee5bd2e66"} Dec 10 15:23:54 crc kubenswrapper[4669]: I1210 15:23:54.023953 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-tscn9" Dec 10 15:23:54 crc kubenswrapper[4669]: I1210 15:23:54.024118 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get 
\"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:54 crc kubenswrapper[4669]: I1210 15:23:54.024152 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:54 crc kubenswrapper[4669]: I1210 15:23:54.027290 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"28c45c59-6d2c-4162-9dd0-40cef9280420","Type":"ContainerStarted","Data":"86f8e2b81a816025d874c0d7fd640c8283024dcb3b7e76faba4836030b4f63ef"} Dec 10 15:23:54 crc kubenswrapper[4669]: E1210 15:23:54.034465 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-qcjfk" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" Dec 10 15:23:54 crc kubenswrapper[4669]: E1210 15:23:54.034475 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-lb6d9" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" Dec 10 15:23:54 crc kubenswrapper[4669]: I1210 15:23:54.157298 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qbnt7"] Dec 10 15:23:55 crc kubenswrapper[4669]: I1210 15:23:55.032009 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"5e8bb4ea-6298-40fe-8221-9d7106f0fc22","Type":"ContainerStarted","Data":"986c66158cc59b0695ea22aac6d0a95687f39aaab85c35142078787475d75a33"} Dec 10 15:23:55 crc kubenswrapper[4669]: I1210 15:23:55.032503 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:55 crc kubenswrapper[4669]: I1210 15:23:55.032547 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:55 crc kubenswrapper[4669]: I1210 15:23:55.275412 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:55 crc kubenswrapper[4669]: I1210 15:23:55.275475 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:55 crc kubenswrapper[4669]: I1210 15:23:55.275486 4669 
patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:55 crc kubenswrapper[4669]: I1210 15:23:55.275568 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:56 crc kubenswrapper[4669]: I1210 15:23:56.038543 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"28c45c59-6d2c-4162-9dd0-40cef9280420","Type":"ContainerStarted","Data":"807610508aad809bd517641bb2fd295969c3829c80035ca9eccc09ee1a54a6e6"} Dec 10 15:23:56 crc kubenswrapper[4669]: I1210 15:23:56.039978 4669 generic.go:334] "Generic (PLEG): container finished" podID="5e8bb4ea-6298-40fe-8221-9d7106f0fc22" containerID="1abfd916493694f67a0ac9051100fe5e3dac9c89f83ef52bbe7a6a41dd0fad52" exitCode=0 Dec 10 15:23:56 crc kubenswrapper[4669]: I1210 15:23:56.040053 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"5e8bb4ea-6298-40fe-8221-9d7106f0fc22","Type":"ContainerDied","Data":"1abfd916493694f67a0ac9051100fe5e3dac9c89f83ef52bbe7a6a41dd0fad52"} Dec 10 15:23:56 crc kubenswrapper[4669]: I1210 15:23:56.040472 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:23:56 crc kubenswrapper[4669]: I1210 15:23:56.040593 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:23:56 crc kubenswrapper[4669]: I1210 15:23:56.077510 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=15.077484469 podStartE2EDuration="15.077484469s" podCreationTimestamp="2025-12-10 15:23:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:23:56.058050485 +0000 UTC m=+209.974997112" watchObservedRunningTime="2025-12-10 15:23:56.077484469 +0000 UTC m=+209.994431096" Dec 10 15:23:57 crc kubenswrapper[4669]: I1210 15:23:57.312784 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:57 crc kubenswrapper[4669]: I1210 15:23:57.439455 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kubelet-dir\") pod \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\" (UID: \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\") " Dec 10 15:23:57 crc kubenswrapper[4669]: I1210 15:23:57.439648 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5e8bb4ea-6298-40fe-8221-9d7106f0fc22" (UID: "5e8bb4ea-6298-40fe-8221-9d7106f0fc22"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:23:57 crc kubenswrapper[4669]: I1210 15:23:57.439815 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kube-api-access\") pod \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\" (UID: \"5e8bb4ea-6298-40fe-8221-9d7106f0fc22\") " Dec 10 15:23:57 crc kubenswrapper[4669]: I1210 15:23:57.440190 4669 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:23:57 crc kubenswrapper[4669]: I1210 15:23:57.450523 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5e8bb4ea-6298-40fe-8221-9d7106f0fc22" (UID: "5e8bb4ea-6298-40fe-8221-9d7106f0fc22"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:23:57 crc kubenswrapper[4669]: I1210 15:23:57.541365 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e8bb4ea-6298-40fe-8221-9d7106f0fc22-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 15:23:58 crc kubenswrapper[4669]: I1210 15:23:58.053121 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"5e8bb4ea-6298-40fe-8221-9d7106f0fc22","Type":"ContainerDied","Data":"986c66158cc59b0695ea22aac6d0a95687f39aaab85c35142078787475d75a33"} Dec 10 15:23:58 crc kubenswrapper[4669]: I1210 15:23:58.053159 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 10 15:23:58 crc kubenswrapper[4669]: I1210 15:23:58.053175 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="986c66158cc59b0695ea22aac6d0a95687f39aaab85c35142078787475d75a33" Dec 10 15:23:58 crc kubenswrapper[4669]: I1210 15:23:58.745106 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:23:58 crc kubenswrapper[4669]: I1210 15:23:58.746390 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:23:58 crc kubenswrapper[4669]: I1210 15:23:58.746667 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:23:58 crc kubenswrapper[4669]: I1210 15:23:58.747666 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:23:58 crc kubenswrapper[4669]: I1210 15:23:58.748063 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9" gracePeriod=600 Dec 10 15:24:03 crc kubenswrapper[4669]: I1210 15:24:03.077912 4669 generic.go:334] "Generic (PLEG): container finished" podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9" exitCode=0 Dec 10 15:24:03 crc kubenswrapper[4669]: I1210 15:24:03.077999 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9"} Dec 10 15:24:04 crc kubenswrapper[4669]: I1210 15:24:04.088173 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"818d0f66bc65130ac95f9c881dc856245c1d0a6c37a935776a040b20d9b6e12a"} Dec 10 15:24:05 crc kubenswrapper[4669]: I1210 15:24:05.275232 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:24:05 crc kubenswrapper[4669]: I1210 15:24:05.276335 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" 
containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:24:05 crc kubenswrapper[4669]: I1210 15:24:05.275274 4669 patch_prober.go:28] interesting pod/downloads-7954f5f757-tscn9 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 10 15:24:05 crc kubenswrapper[4669]: I1210 15:24:05.276674 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-tscn9" podUID="41607592-fbbb-4003-b9eb-b11cbce16627" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.28:8080/\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 10 15:24:07 crc kubenswrapper[4669]: I1210 15:24:07.157367 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmf56" event={"ID":"e6b094d9-c376-4f11-8c0e-7764c92d1031","Type":"ContainerStarted","Data":"5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9"} Dec 10 15:24:07 crc kubenswrapper[4669]: I1210 15:24:07.163380 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h89pm" event={"ID":"0f3441be-4b11-4f4a-b072-7ca1894c5f86","Type":"ContainerStarted","Data":"6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e"} Dec 10 15:24:09 crc kubenswrapper[4669]: I1210 15:24:09.208670 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcjfk" event={"ID":"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5","Type":"ContainerStarted","Data":"742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64"} Dec 10 15:24:10 crc kubenswrapper[4669]: I1210 15:24:10.215400 4669 generic.go:334] "Generic (PLEG): container finished" podID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerID="6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e" exitCode=0 Dec 10 15:24:10 crc kubenswrapper[4669]: I1210 15:24:10.215501 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h89pm" event={"ID":"0f3441be-4b11-4f4a-b072-7ca1894c5f86","Type":"ContainerDied","Data":"6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e"} Dec 10 15:24:10 crc kubenswrapper[4669]: I1210 15:24:10.220304 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lb6d9" event={"ID":"99dd4f24-38ac-4110-a330-19ab7710acd9","Type":"ContainerStarted","Data":"c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05"} Dec 10 15:24:10 crc kubenswrapper[4669]: I1210 15:24:10.222694 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ssjx9" event={"ID":"71b77d7f-f74a-4442-a9df-2c36237983a2","Type":"ContainerStarted","Data":"edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e"} Dec 10 15:24:11 crc kubenswrapper[4669]: I1210 15:24:11.306430 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ssjx9" event={"ID":"71b77d7f-f74a-4442-a9df-2c36237983a2","Type":"ContainerDied","Data":"edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e"} Dec 10 15:24:11 crc kubenswrapper[4669]: I1210 15:24:11.306292 4669 generic.go:334] "Generic (PLEG): container finished" podID="71b77d7f-f74a-4442-a9df-2c36237983a2" 
containerID="edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e" exitCode=0 Dec 10 15:24:11 crc kubenswrapper[4669]: I1210 15:24:11.310551 4669 generic.go:334] "Generic (PLEG): container finished" podID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerID="742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64" exitCode=0 Dec 10 15:24:11 crc kubenswrapper[4669]: I1210 15:24:11.310650 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcjfk" event={"ID":"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5","Type":"ContainerDied","Data":"742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64"} Dec 10 15:24:11 crc kubenswrapper[4669]: I1210 15:24:11.317644 4669 generic.go:334] "Generic (PLEG): container finished" podID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerID="5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9" exitCode=0 Dec 10 15:24:11 crc kubenswrapper[4669]: I1210 15:24:11.317799 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmf56" event={"ID":"e6b094d9-c376-4f11-8c0e-7764c92d1031","Type":"ContainerDied","Data":"5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9"} Dec 10 15:24:11 crc kubenswrapper[4669]: I1210 15:24:11.321216 4669 generic.go:334] "Generic (PLEG): container finished" podID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerID="c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05" exitCode=0 Dec 10 15:24:11 crc kubenswrapper[4669]: I1210 15:24:11.321251 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lb6d9" event={"ID":"99dd4f24-38ac-4110-a330-19ab7710acd9","Type":"ContainerDied","Data":"c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05"} Dec 10 15:24:15 crc kubenswrapper[4669]: I1210 15:24:15.280868 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-tscn9" Dec 10 15:24:19 crc kubenswrapper[4669]: I1210 15:24:19.234972 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" podUID="95d741d8-41e2-4b8d-9fcd-b11f972345bf" containerName="oauth-openshift" containerID="cri-o://fbbe3a89692450f1cc4b0da3a430601cc7449c4e543cf789a6181c3a519d3ce7" gracePeriod=15 Dec 10 15:24:20 crc kubenswrapper[4669]: I1210 15:24:20.368744 4669 generic.go:334] "Generic (PLEG): container finished" podID="95d741d8-41e2-4b8d-9fcd-b11f972345bf" containerID="fbbe3a89692450f1cc4b0da3a430601cc7449c4e543cf789a6181c3a519d3ce7" exitCode=0 Dec 10 15:24:20 crc kubenswrapper[4669]: I1210 15:24:20.368842 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" event={"ID":"95d741d8-41e2-4b8d-9fcd-b11f972345bf","Type":"ContainerDied","Data":"fbbe3a89692450f1cc4b0da3a430601cc7449c4e543cf789a6181c3a519d3ce7"} Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.624015 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.674031 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-f58fb8db6-4scnj"] Dec 10 15:24:22 crc kubenswrapper[4669]: E1210 15:24:22.674344 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95d741d8-41e2-4b8d-9fcd-b11f972345bf" containerName="oauth-openshift" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.674363 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="95d741d8-41e2-4b8d-9fcd-b11f972345bf" containerName="oauth-openshift" Dec 10 15:24:22 crc kubenswrapper[4669]: E1210 15:24:22.674393 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e8bb4ea-6298-40fe-8221-9d7106f0fc22" containerName="pruner" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.674402 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e8bb4ea-6298-40fe-8221-9d7106f0fc22" containerName="pruner" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.674523 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e8bb4ea-6298-40fe-8221-9d7106f0fc22" containerName="pruner" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.674544 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="95d741d8-41e2-4b8d-9fcd-b11f972345bf" containerName="oauth-openshift" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.675264 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.707714 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f58fb8db6-4scnj"] Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.749837 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-error\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.751425 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-provider-selection\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.751513 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-ocp-branding-template\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.751540 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-serving-cert\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.751982 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-5w9kr\" (UniqueName: \"kubernetes.io/projected/95d741d8-41e2-4b8d-9fcd-b11f972345bf-kube-api-access-5w9kr\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.752039 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-trusted-ca-bundle\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.752066 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-dir\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.752088 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-idp-0-file-data\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.752110 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-service-ca\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.752165 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-router-certs\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.752188 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-login\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.752247 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-cliconfig\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.752274 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-session\") pod \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.752312 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-policies\") pod 
\"95d741d8-41e2-4b8d-9fcd-b11f972345bf\" (UID: \"95d741d8-41e2-4b8d-9fcd-b11f972345bf\") " Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.753208 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.753259 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.754551 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.755109 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.755128 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.757534 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.758149 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.758509 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.759401 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95d741d8-41e2-4b8d-9fcd-b11f972345bf-kube-api-access-5w9kr" (OuterVolumeSpecName: "kube-api-access-5w9kr") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "kube-api-access-5w9kr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.759595 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.760280 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.761200 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.761315 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.763195 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "95d741d8-41e2-4b8d-9fcd-b11f972345bf" (UID: "95d741d8-41e2-4b8d-9fcd-b11f972345bf"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.854415 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.854499 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-template-error\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.854683 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.854787 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-session\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.854850 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-router-certs\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.854899 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.854976 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855451 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-service-ca\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855556 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-audit-dir\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855612 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855637 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbdfq\" (UniqueName: \"kubernetes.io/projected/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-kube-api-access-xbdfq\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855676 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-audit-policies\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855716 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-template-login\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855743 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855812 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855830 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-ocp-branding-template\") on node 
\"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855842 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855854 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w9kr\" (UniqueName: \"kubernetes.io/projected/95d741d8-41e2-4b8d-9fcd-b11f972345bf-kube-api-access-5w9kr\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855865 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855881 4669 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855892 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855901 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855911 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855922 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855932 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855944 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855969 4669 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/95d741d8-41e2-4b8d-9fcd-b11f972345bf-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.855979 4669 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/95d741d8-41e2-4b8d-9fcd-b11f972345bf-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:22 crc 
kubenswrapper[4669]: I1210 15:24:22.957619 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-audit-dir\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.957733 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.957774 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbdfq\" (UniqueName: \"kubernetes.io/projected/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-kube-api-access-xbdfq\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.957824 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-audit-policies\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.957878 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-template-login\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.957918 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-audit-dir\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.957929 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.958042 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.958110 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-template-error\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.958194 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.958330 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-session\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.958391 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-router-certs\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.958442 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.958502 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.960434 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-audit-policies\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.960877 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-service-ca\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.962124 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" 
(UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-service-ca\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.963448 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-serving-cert\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.964746 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.964872 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.965729 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.966048 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-cliconfig\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.966138 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.966432 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-router-certs\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.967671 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-system-session\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.967999 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-template-login\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.971114 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-v4-0-config-user-template-error\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:22 crc kubenswrapper[4669]: I1210 15:24:22.979572 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbdfq\" (UniqueName: \"kubernetes.io/projected/cfcbaa16-1bf8-4238-b9ba-5e87e592e55b-kube-api-access-xbdfq\") pod \"oauth-openshift-f58fb8db6-4scnj\" (UID: \"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b\") " pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.005127 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.308049 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-f58fb8db6-4scnj"] Dec 10 15:24:23 crc kubenswrapper[4669]: W1210 15:24:23.316489 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfcbaa16_1bf8_4238_b9ba_5e87e592e55b.slice/crio-a7167799ba9de1b6e751a7c4b4d122a6c078703a441c56bf012fab68a1560d2c WatchSource:0}: Error finding container a7167799ba9de1b6e751a7c4b4d122a6c078703a441c56bf012fab68a1560d2c: Status 404 returned error can't find the container with id a7167799ba9de1b6e751a7c4b4d122a6c078703a441c56bf012fab68a1560d2c Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.389626 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ssjx9" event={"ID":"71b77d7f-f74a-4442-a9df-2c36237983a2","Type":"ContainerStarted","Data":"5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0"} Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.396954 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.396940 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-qbnt7" event={"ID":"95d741d8-41e2-4b8d-9fcd-b11f972345bf","Type":"ContainerDied","Data":"e5a686627eb5668f623600eed2191a11fea86d2d907d1f815a8c81dccb714091"} Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.397196 4669 scope.go:117] "RemoveContainer" containerID="fbbe3a89692450f1cc4b0da3a430601cc7449c4e543cf789a6181c3a519d3ce7" Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.399570 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmf56" event={"ID":"e6b094d9-c376-4f11-8c0e-7764c92d1031","Type":"ContainerStarted","Data":"e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25"} Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.401123 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" event={"ID":"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b","Type":"ContainerStarted","Data":"a7167799ba9de1b6e751a7c4b4d122a6c078703a441c56bf012fab68a1560d2c"} Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.409995 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcjfk" event={"ID":"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5","Type":"ContainerStarted","Data":"dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50"} Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.416658 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h89pm" event={"ID":"0f3441be-4b11-4f4a-b072-7ca1894c5f86","Type":"ContainerStarted","Data":"d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8"} Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.424087 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ssjx9" podStartSLOduration=9.458973749 podStartE2EDuration="1m30.424062729s" podCreationTimestamp="2025-12-10 15:22:53 +0000 UTC" firstStartedPulling="2025-12-10 15:23:02.093164988 +0000 UTC m=+156.010111615" lastFinishedPulling="2025-12-10 15:24:23.058253978 +0000 UTC m=+236.975200595" observedRunningTime="2025-12-10 15:24:23.409696296 +0000 UTC m=+237.326642923" watchObservedRunningTime="2025-12-10 15:24:23.424062729 +0000 UTC m=+237.341009356" Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.451675 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h89pm" podStartSLOduration=6.121117258 podStartE2EDuration="1m33.451647878s" podCreationTimestamp="2025-12-10 15:22:50 +0000 UTC" firstStartedPulling="2025-12-10 15:22:55.702430355 +0000 UTC m=+149.619376982" lastFinishedPulling="2025-12-10 15:24:23.032960965 +0000 UTC m=+236.949907602" observedRunningTime="2025-12-10 15:24:23.450601562 +0000 UTC m=+237.367548189" watchObservedRunningTime="2025-12-10 15:24:23.451647878 +0000 UTC m=+237.368594505" Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.454121 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lmf56" podStartSLOduration=6.830199175 podStartE2EDuration="1m29.454111627s" podCreationTimestamp="2025-12-10 15:22:54 +0000 UTC" firstStartedPulling="2025-12-10 15:23:00.449715973 +0000 UTC m=+154.366662600" 
lastFinishedPulling="2025-12-10 15:24:23.073628415 +0000 UTC m=+236.990575052" observedRunningTime="2025-12-10 15:24:23.42746533 +0000 UTC m=+237.344411957" watchObservedRunningTime="2025-12-10 15:24:23.454111627 +0000 UTC m=+237.371058244" Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.470540 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qcjfk" podStartSLOduration=7.34011595 podStartE2EDuration="1m32.470216721s" podCreationTimestamp="2025-12-10 15:22:51 +0000 UTC" firstStartedPulling="2025-12-10 15:22:57.941800923 +0000 UTC m=+151.858747550" lastFinishedPulling="2025-12-10 15:24:23.071901674 +0000 UTC m=+236.988848321" observedRunningTime="2025-12-10 15:24:23.469680998 +0000 UTC m=+237.386627645" watchObservedRunningTime="2025-12-10 15:24:23.470216721 +0000 UTC m=+237.387163348" Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.491824 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qbnt7"] Dec 10 15:24:23 crc kubenswrapper[4669]: I1210 15:24:23.495571 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-qbnt7"] Dec 10 15:24:24 crc kubenswrapper[4669]: I1210 15:24:24.442774 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95d741d8-41e2-4b8d-9fcd-b11f972345bf" path="/var/lib/kubelet/pods/95d741d8-41e2-4b8d-9fcd-b11f972345bf/volumes" Dec 10 15:24:24 crc kubenswrapper[4669]: I1210 15:24:24.493932 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:24:24 crc kubenswrapper[4669]: I1210 15:24:24.494179 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:24:24 crc kubenswrapper[4669]: I1210 15:24:24.519239 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5l5h" event={"ID":"8d636e2e-c705-4462-bc33-88f18c5f3aa2","Type":"ContainerStarted","Data":"6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189"} Dec 10 15:24:24 crc kubenswrapper[4669]: I1210 15:24:24.527202 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lb6d9" event={"ID":"99dd4f24-38ac-4110-a330-19ab7710acd9","Type":"ContainerStarted","Data":"e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361"} Dec 10 15:24:24 crc kubenswrapper[4669]: I1210 15:24:24.538904 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnn4r" event={"ID":"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7","Type":"ContainerStarted","Data":"a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3"} Dec 10 15:24:24 crc kubenswrapper[4669]: I1210 15:24:24.565936 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmtb8" event={"ID":"1654a8ed-45e1-416b-9082-21c947d03a70","Type":"ContainerStarted","Data":"19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d"} Dec 10 15:24:25 crc kubenswrapper[4669]: I1210 15:24:25.278549 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:24:25 crc kubenswrapper[4669]: I1210 15:24:25.279561 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:24:25 crc 
kubenswrapper[4669]: I1210 15:24:25.572621 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" event={"ID":"cfcbaa16-1bf8-4238-b9ba-5e87e592e55b","Type":"ContainerStarted","Data":"addb4a1e966dd95ca1bc439e66d08e756c9df37628ed8f35d40c861d17c0ef86"} Dec 10 15:24:25 crc kubenswrapper[4669]: I1210 15:24:25.575606 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:25 crc kubenswrapper[4669]: I1210 15:24:25.613780 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" podStartSLOduration=31.613762413 podStartE2EDuration="31.613762413s" podCreationTimestamp="2025-12-10 15:23:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:24:25.613495007 +0000 UTC m=+239.530441634" watchObservedRunningTime="2025-12-10 15:24:25.613762413 +0000 UTC m=+239.530709040" Dec 10 15:24:25 crc kubenswrapper[4669]: I1210 15:24:25.767588 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-lb6d9" podStartSLOduration=10.59663179 podStartE2EDuration="1m35.767570234s" podCreationTimestamp="2025-12-10 15:22:50 +0000 UTC" firstStartedPulling="2025-12-10 15:22:58.021805234 +0000 UTC m=+151.938751861" lastFinishedPulling="2025-12-10 15:24:23.192743678 +0000 UTC m=+237.109690305" observedRunningTime="2025-12-10 15:24:25.765837523 +0000 UTC m=+239.682784150" watchObservedRunningTime="2025-12-10 15:24:25.767570234 +0000 UTC m=+239.684516861" Dec 10 15:24:26 crc kubenswrapper[4669]: I1210 15:24:26.445156 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-f58fb8db6-4scnj" Dec 10 15:24:26 crc kubenswrapper[4669]: I1210 15:24:26.546711 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lmf56" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerName="registry-server" probeResult="failure" output=< Dec 10 15:24:26 crc kubenswrapper[4669]: timeout: failed to connect service ":50051" within 1s Dec 10 15:24:26 crc kubenswrapper[4669]: > Dec 10 15:24:26 crc kubenswrapper[4669]: I1210 15:24:26.563409 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-ssjx9" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerName="registry-server" probeResult="failure" output=< Dec 10 15:24:26 crc kubenswrapper[4669]: timeout: failed to connect service ":50051" within 1s Dec 10 15:24:26 crc kubenswrapper[4669]: > Dec 10 15:24:27 crc kubenswrapper[4669]: I1210 15:24:27.600164 4669 generic.go:334] "Generic (PLEG): container finished" podID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerID="a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3" exitCode=0 Dec 10 15:24:27 crc kubenswrapper[4669]: I1210 15:24:27.600924 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnn4r" event={"ID":"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7","Type":"ContainerDied","Data":"a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3"} Dec 10 15:24:28 crc kubenswrapper[4669]: I1210 15:24:28.610795 4669 generic.go:334] "Generic (PLEG): container finished" podID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" 
containerID="6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189" exitCode=0 Dec 10 15:24:28 crc kubenswrapper[4669]: I1210 15:24:28.610930 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5l5h" event={"ID":"8d636e2e-c705-4462-bc33-88f18c5f3aa2","Type":"ContainerDied","Data":"6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189"} Dec 10 15:24:29 crc kubenswrapper[4669]: I1210 15:24:29.622143 4669 generic.go:334] "Generic (PLEG): container finished" podID="1654a8ed-45e1-416b-9082-21c947d03a70" containerID="19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d" exitCode=0 Dec 10 15:24:29 crc kubenswrapper[4669]: I1210 15:24:29.622248 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmtb8" event={"ID":"1654a8ed-45e1-416b-9082-21c947d03a70","Type":"ContainerDied","Data":"19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d"} Dec 10 15:24:30 crc kubenswrapper[4669]: I1210 15:24:30.629443 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnn4r" event={"ID":"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7","Type":"ContainerStarted","Data":"2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243"} Dec 10 15:24:30 crc kubenswrapper[4669]: I1210 15:24:30.649192 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-vnn4r" podStartSLOduration=7.757661582 podStartE2EDuration="1m37.649172218s" podCreationTimestamp="2025-12-10 15:22:53 +0000 UTC" firstStartedPulling="2025-12-10 15:22:59.365534145 +0000 UTC m=+153.282480772" lastFinishedPulling="2025-12-10 15:24:29.257044781 +0000 UTC m=+243.173991408" observedRunningTime="2025-12-10 15:24:30.647235512 +0000 UTC m=+244.564182159" watchObservedRunningTime="2025-12-10 15:24:30.649172218 +0000 UTC m=+244.566118845" Dec 10 15:24:31 crc kubenswrapper[4669]: I1210 15:24:31.086560 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:24:31 crc kubenswrapper[4669]: I1210 15:24:31.086996 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:24:31 crc kubenswrapper[4669]: I1210 15:24:31.204359 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:24:31 crc kubenswrapper[4669]: I1210 15:24:31.253208 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:24:31 crc kubenswrapper[4669]: I1210 15:24:31.253276 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:24:31 crc kubenswrapper[4669]: I1210 15:24:31.431117 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:24:31 crc kubenswrapper[4669]: I1210 15:24:31.679421 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:24:31 crc kubenswrapper[4669]: I1210 15:24:31.690019 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:24:32 crc kubenswrapper[4669]: I1210 15:24:32.043598 4669 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:24:32 crc kubenswrapper[4669]: I1210 15:24:32.043989 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:24:32 crc kubenswrapper[4669]: I1210 15:24:32.087171 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:24:32 crc kubenswrapper[4669]: I1210 15:24:32.678962 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.027281 4669 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.028205 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.028375 4669 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.028844 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7" gracePeriod=15 Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.028938 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe" gracePeriod=15 Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.029087 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945" gracePeriod=15 Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.028972 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee" gracePeriod=15 Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.028837 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682" gracePeriod=15 Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.029687 4669 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 10 15:24:33 crc kubenswrapper[4669]: E1210 15:24:33.029947 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 
15:24:33.029960 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 10 15:24:33 crc kubenswrapper[4669]: E1210 15:24:33.029972 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.029979 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 10 15:24:33 crc kubenswrapper[4669]: E1210 15:24:33.029991 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.029999 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 10 15:24:33 crc kubenswrapper[4669]: E1210 15:24:33.030012 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.030018 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 10 15:24:33 crc kubenswrapper[4669]: E1210 15:24:33.030030 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.030036 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 10 15:24:33 crc kubenswrapper[4669]: E1210 15:24:33.030044 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.030051 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.030139 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.030150 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.030163 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.030170 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.030177 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 10 15:24:33 crc kubenswrapper[4669]: E1210 15:24:33.138840 4669 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.110:6443: connect: connection refused" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.161350 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.161409 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.161429 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.161445 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.161467 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.162102 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.162356 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.162394 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264038 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264069 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264106 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264135 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264184 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264205 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264256 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264275 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264342 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264377 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod 
\"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264397 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264420 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264463 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264489 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264512 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.264536 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.439755 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:33 crc kubenswrapper[4669]: W1210 15:24:33.475042 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-58a0e83f74b7c1513aaa1545ccf5deafc0c1f715e893ca746ffd062354b1d314 WatchSource:0}: Error finding container 58a0e83f74b7c1513aaa1545ccf5deafc0c1f715e893ca746ffd062354b1d314: Status 404 returned error can't find the container with id 58a0e83f74b7c1513aaa1545ccf5deafc0c1f715e893ca746ffd062354b1d314 Dec 10 15:24:33 crc kubenswrapper[4669]: E1210 15:24:33.479078 4669 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.110:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187fe4015e2c38ec openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 15:24:33.47858046 +0000 UTC m=+247.395527087,LastTimestamp:2025-12-10 15:24:33.47858046 +0000 UTC m=+247.395527087,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 10 15:24:33 crc kubenswrapper[4669]: I1210 15:24:33.650743 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"58a0e83f74b7c1513aaa1545ccf5deafc0c1f715e893ca746ffd062354b1d314"} Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.122241 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.123426 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.172952 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.173975 4669 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.174156 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc 
kubenswrapper[4669]: I1210 15:24:34.546815 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.547942 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.548483 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.593048 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.593792 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.594163 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.656464 4669 generic.go:334] "Generic (PLEG): container finished" podID="28c45c59-6d2c-4162-9dd0-40cef9280420" containerID="807610508aad809bd517641bb2fd295969c3829c80035ca9eccc09ee1a54a6e6" exitCode=0 Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.656550 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"28c45c59-6d2c-4162-9dd0-40cef9280420","Type":"ContainerDied","Data":"807610508aad809bd517641bb2fd295969c3829c80035ca9eccc09ee1a54a6e6"} Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.657252 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.657768 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.658008 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.659671 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.660257 4669 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7" exitCode=0 Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.660282 4669 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee" exitCode=0 Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.660291 4669 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe" exitCode=0 Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.660316 4669 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945" exitCode=2 Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.661768 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5l5h" event={"ID":"8d636e2e-c705-4462-bc33-88f18c5f3aa2","Type":"ContainerStarted","Data":"ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0"} Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.662886 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.663377 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"c132be169f117b802d1a1f819e11d5731bd37db395da70b342b0b84da3dc6cf6"} Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.663391 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: E1210 15:24:34.663805 4669 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.110:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.663909 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: 
connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.664200 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.664502 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.664818 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.665099 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.665402 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.665984 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmtb8" event={"ID":"1654a8ed-45e1-416b-9082-21c947d03a70","Type":"ContainerStarted","Data":"df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42"} Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.666647 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.666891 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.667082 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 
15:24:34.667264 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.667438 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.714675 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.717649 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.718022 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.718291 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.718548 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: I1210 15:24:34.718733 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: E1210 15:24:34.743374 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:24:34Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:24:34Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:24:34Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-10T15:24:34Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: E1210 15:24:34.743884 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: E1210 15:24:34.744538 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: E1210 15:24:34.744776 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: E1210 15:24:34.745029 4669 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:34 crc kubenswrapper[4669]: E1210 15:24:34.745118 4669 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.362721 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.363350 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.363755 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.364009 4669 status_manager.go:851] "Failed to get status for pod" 
podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.364173 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.364323 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.364454 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.447598 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.448531 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.449015 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.449360 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.449596 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.449803 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.450014 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.511750 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.512417 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.513566 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.514033 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.514511 4669 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.514867 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.515282 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.515604 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.515893 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.673645 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.674710 4669 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682" exitCode=0 Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.674830 4669 scope.go:117] "RemoveContainer" containerID="d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.674791 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 10 15:24:35 crc kubenswrapper[4669]: E1210 15:24:35.675610 4669 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.110:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.698558 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.698622 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.698657 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.698983 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.699023 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.699043 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). 
InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.702651 4669 scope.go:117] "RemoveContainer" containerID="cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.715132 4669 scope.go:117] "RemoveContainer" containerID="b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.741391 4669 scope.go:117] "RemoveContainer" containerID="18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.760992 4669 scope.go:117] "RemoveContainer" containerID="c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.777616 4669 scope.go:117] "RemoveContainer" containerID="c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.800084 4669 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.800516 4669 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.800528 4669 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.824613 4669 scope.go:117] "RemoveContainer" containerID="d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7" Dec 10 15:24:35 crc kubenswrapper[4669]: E1210 15:24:35.825288 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\": container with ID starting with d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7 not found: ID does not exist" containerID="d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.825338 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7"} err="failed to get container status \"d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\": rpc error: code = NotFound desc = could not find container \"d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7\": container with ID starting with d581ce2586a651a628462d884cac187077a140f661cfec2d627c251ec37c98b7 not found: ID does not exist" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.825374 4669 scope.go:117] "RemoveContainer" containerID="cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee" Dec 10 15:24:35 crc kubenswrapper[4669]: E1210 15:24:35.829516 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\": container with ID starting with cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee not found: ID does not exist" 
containerID="cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.829567 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee"} err="failed to get container status \"cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\": rpc error: code = NotFound desc = could not find container \"cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee\": container with ID starting with cc821beae81799123d24661dd2fb9e7e6021075c39ec30c7af30a76c5b791fee not found: ID does not exist" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.829604 4669 scope.go:117] "RemoveContainer" containerID="b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe" Dec 10 15:24:35 crc kubenswrapper[4669]: E1210 15:24:35.830418 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\": container with ID starting with b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe not found: ID does not exist" containerID="b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.830451 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe"} err="failed to get container status \"b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\": rpc error: code = NotFound desc = could not find container \"b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe\": container with ID starting with b0503c585b5d33d609146e1077fe3fcb4d010539502b82cdac43bf51442634fe not found: ID does not exist" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.830473 4669 scope.go:117] "RemoveContainer" containerID="18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945" Dec 10 15:24:35 crc kubenswrapper[4669]: E1210 15:24:35.831367 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\": container with ID starting with 18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945 not found: ID does not exist" containerID="18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.831398 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945"} err="failed to get container status \"18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\": rpc error: code = NotFound desc = could not find container \"18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945\": container with ID starting with 18f7066405151381368cd8c20e8f9e82a5b0f4a2c6ba985b3b67b658635e2945 not found: ID does not exist" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.831422 4669 scope.go:117] "RemoveContainer" containerID="c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682" Dec 10 15:24:35 crc kubenswrapper[4669]: E1210 15:24:35.831688 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\": container with ID starting with c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682 not found: ID does not exist" containerID="c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.831715 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682"} err="failed to get container status \"c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\": rpc error: code = NotFound desc = could not find container \"c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682\": container with ID starting with c21f11bc1eeaf6920cf4e1b98e82e1cddd7f9ca1a477a121a709c7e4a269b682 not found: ID does not exist" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.831733 4669 scope.go:117] "RemoveContainer" containerID="c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8" Dec 10 15:24:35 crc kubenswrapper[4669]: E1210 15:24:35.834677 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\": container with ID starting with c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8 not found: ID does not exist" containerID="c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.834726 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8"} err="failed to get container status \"c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\": rpc error: code = NotFound desc = could not find container \"c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8\": container with ID starting with c760f17e81215b27ffb166cccad884a867b85b7ecfa74baaf2583b5f7683e9d8 not found: ID does not exist" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.977204 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.977806 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.978229 4669 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.978702 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.979005 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.979334 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.979607 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.980005 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.988701 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.988986 4669 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 
38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.989236 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.989458 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.989693 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.989913 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:35 crc kubenswrapper[4669]: I1210 15:24:35.990143 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.106178 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28c45c59-6d2c-4162-9dd0-40cef9280420-kube-api-access\") pod \"28c45c59-6d2c-4162-9dd0-40cef9280420\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.106249 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-var-lock\") pod \"28c45c59-6d2c-4162-9dd0-40cef9280420\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.106299 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-kubelet-dir\") pod \"28c45c59-6d2c-4162-9dd0-40cef9280420\" (UID: \"28c45c59-6d2c-4162-9dd0-40cef9280420\") " Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.106350 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-var-lock" (OuterVolumeSpecName: "var-lock") pod "28c45c59-6d2c-4162-9dd0-40cef9280420" (UID: "28c45c59-6d2c-4162-9dd0-40cef9280420"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.106471 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "28c45c59-6d2c-4162-9dd0-40cef9280420" (UID: "28c45c59-6d2c-4162-9dd0-40cef9280420"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.106552 4669 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-var-lock\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.111691 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28c45c59-6d2c-4162-9dd0-40cef9280420-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "28c45c59-6d2c-4162-9dd0-40cef9280420" (UID: "28c45c59-6d2c-4162-9dd0-40cef9280420"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.208232 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/28c45c59-6d2c-4162-9dd0-40cef9280420-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.208263 4669 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/28c45c59-6d2c-4162-9dd0-40cef9280420-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.400620 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.401255 4669 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.401471 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.401731 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.401934 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.402128 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.404268 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.405801 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.693012 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.693001 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"28c45c59-6d2c-4162-9dd0-40cef9280420","Type":"ContainerDied","Data":"86f8e2b81a816025d874c0d7fd640c8283024dcb3b7e76faba4836030b4f63ef"} Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.693372 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86f8e2b81a816025d874c0d7fd640c8283024dcb3b7e76faba4836030b4f63ef" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.698076 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.698270 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.698408 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.698540 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused" Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 
15:24:36.698669 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:36 crc kubenswrapper[4669]: I1210 15:24:36.698819 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:40 crc kubenswrapper[4669]: E1210 15:24:40.798961 4669 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:40 crc kubenswrapper[4669]: E1210 15:24:40.800608 4669 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:40 crc kubenswrapper[4669]: E1210 15:24:40.801016 4669 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:40 crc kubenswrapper[4669]: E1210 15:24:40.801365 4669 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:40 crc kubenswrapper[4669]: E1210 15:24:40.801642 4669 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:40 crc kubenswrapper[4669]: I1210 15:24:40.801671 4669 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease"
Dec 10 15:24:40 crc kubenswrapper[4669]: E1210 15:24:40.801911 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="200ms"
Dec 10 15:24:40 crc kubenswrapper[4669]: E1210 15:24:40.961823 4669 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.110:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187fe4015e2c38ec openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-10 15:24:33.47858046 +0000 UTC m=+247.395527087,LastTimestamp:2025-12-10 15:24:33.47858046 +0000 UTC m=+247.395527087,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 10 15:24:41 crc kubenswrapper[4669]: E1210 15:24:41.003656 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="400ms"
Dec 10 15:24:41 crc kubenswrapper[4669]: E1210 15:24:41.405101 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="800ms"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.080999 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t5l5h"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.083062 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t5l5h"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.119137 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t5l5h"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.119746 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.120002 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.120277 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.120489 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.120720 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.120983 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: E1210 15:24:42.206366 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="1.6s"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.766252 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t5l5h"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.766755 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.766996 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.767469 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.767648 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.767799 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:42 crc kubenswrapper[4669]: I1210 15:24:42.767950 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:43 crc kubenswrapper[4669]: E1210 15:24:43.807119 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="3.2s"
Dec 10 15:24:44 crc kubenswrapper[4669]: E1210 15:24:44.465515 4669 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.110:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" volumeName="registry-storage"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.493731 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xmtb8"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.493788 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xmtb8"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.544722 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-xmtb8"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.545191 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.545675 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.545962 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.546410 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.546809 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.547078 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.791477 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xmtb8"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.792145 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.792436 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.793078 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.793540 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.793735 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:44 crc kubenswrapper[4669]: I1210 15:24:44.794062 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.397603 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.399109 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.399298 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.399460 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.399617 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.399785 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.399935 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.412380 4669 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.412404 4669 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:45 crc kubenswrapper[4669]: E1210 15:24:45.412655 4669 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.413019 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:45 crc kubenswrapper[4669]: I1210 15:24:45.746043 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b6699d61795e33594694f1b50517481a490a0757c76dfa5173a2be288c8ae551"}
Dec 10 15:24:46 crc kubenswrapper[4669]: I1210 15:24:46.413277 4669 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:46 crc kubenswrapper[4669]: I1210 15:24:46.416148 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:46 crc kubenswrapper[4669]: I1210 15:24:46.416607 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:46 crc kubenswrapper[4669]: I1210 15:24:46.417043 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:46 crc kubenswrapper[4669]: I1210 15:24:46.417450 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:46 crc kubenswrapper[4669]: I1210 15:24:46.417851 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:46 crc kubenswrapper[4669]: I1210 15:24:46.418587 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:47 crc kubenswrapper[4669]: E1210 15:24:47.008722 4669 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.110:6443: connect: connection refused" interval="6.4s"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.773150 4669 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="73368f59961c04893f4f6ccfb2839b6686136d51eb070a5c0d4b6b9e38c788f1" exitCode=0
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.773314 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"73368f59961c04893f4f6ccfb2839b6686136d51eb070a5c0d4b6b9e38c788f1"}
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.773820 4669 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.773843 4669 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:48 crc kubenswrapper[4669]: E1210 15:24:48.774498 4669 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.110:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.774510 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.774767 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.774961 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.775178 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.776323 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.776804 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.777242 4669 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.779184 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.779281 4669 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5" exitCode=1
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.779324 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5"}
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.779903 4669 scope.go:117] "RemoveContainer" containerID="647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.780278 4669 status_manager.go:851] "Failed to get status for pod" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" pod="openshift-marketplace/certified-operators-t5l5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-t5l5h\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.780705 4669 status_manager.go:851] "Failed to get status for pod" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" pod="openshift-marketplace/redhat-operators-lmf56" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-lmf56\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.781041 4669 status_manager.go:851] "Failed to get status for pod" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" pod="openshift-marketplace/redhat-operators-xmtb8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-xmtb8\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.781322 4669 status_manager.go:851] "Failed to get status for pod" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" pod="openshift-marketplace/redhat-marketplace-ssjx9" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-ssjx9\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.781666 4669 status_manager.go:851] "Failed to get status for pod" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" pod="openshift-marketplace/redhat-marketplace-vnn4r" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-vnn4r\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.781973 4669 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.782316 4669 status_manager.go:851] "Failed to get status for pod" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:48 crc kubenswrapper[4669]: I1210 15:24:48.782515 4669 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.110:6443: connect: connection refused"
Dec 10 15:24:49 crc kubenswrapper[4669]: I1210 15:24:49.789515 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log"
Dec 10 15:24:49 crc kubenswrapper[4669]: I1210 15:24:49.791177 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"15177e5d023ebcbf298cc88874620ef1e93d78772b9a2cf1ff30995b4ee72dab"}
Dec 10 15:24:49 crc kubenswrapper[4669]: I1210 15:24:49.794713 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d4802da8145568d695a941fa22eba4a61a9da5793451f104a3dbdd0c3e9d50f7"}
Dec 10 15:24:49 crc kubenswrapper[4669]: I1210 15:24:49.794848 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"603ddd7140a2ac6aca7a22a341d65c64385fb8c31fc23801780cbf95aa3dd954"}
Dec 10 15:24:50 crc kubenswrapper[4669]: I1210 15:24:50.276838 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 15:24:50 crc kubenswrapper[4669]: I1210 15:24:50.277370 4669 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 10 15:24:50 crc kubenswrapper[4669]: I1210 15:24:50.277469 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 10 15:24:51 crc kubenswrapper[4669]: I1210 15:24:51.821876 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"52723e102b14961cad221009fd0a473c52de4e320c0642dbc37a7d6a79d3cbdc"}
Dec 10 15:24:52 crc kubenswrapper[4669]: I1210 15:24:52.829034 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e36588629aec800ab4c3a21c58e230473d843edeca9d006d63fef44653a607a5"}
Dec 10 15:24:52 crc kubenswrapper[4669]: I1210 15:24:52.829322 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"49653c45f40aac25fc973e0458f15e90c37960c72e2faba7825669d84779abc3"}
Dec 10 15:24:52 crc kubenswrapper[4669]: I1210 15:24:52.829586 4669 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:52 crc kubenswrapper[4669]: I1210 15:24:52.829604 4669 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:52 crc kubenswrapper[4669]: I1210 15:24:52.829789 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:52 crc kubenswrapper[4669]: I1210 15:24:52.840098 4669 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:53 crc kubenswrapper[4669]: I1210 15:24:53.836085 4669 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:53 crc kubenswrapper[4669]: I1210 15:24:53.836121 4669 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:53 crc kubenswrapper[4669]: I1210 15:24:53.983860 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 15:24:55 crc kubenswrapper[4669]: I1210 15:24:55.413573 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:55 crc kubenswrapper[4669]: I1210 15:24:55.413872 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:55 crc kubenswrapper[4669]: I1210 15:24:55.414273 4669 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:55 crc kubenswrapper[4669]: I1210 15:24:55.414289 4669 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:55 crc kubenswrapper[4669]: I1210 15:24:55.420874 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:55 crc kubenswrapper[4669]: I1210 15:24:55.847816 4669 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:55 crc kubenswrapper[4669]: I1210 15:24:55.847852 4669 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:55 crc kubenswrapper[4669]: I1210 15:24:55.851618 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:24:56 crc kubenswrapper[4669]: I1210 15:24:56.237348 4669 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="1cd055eb-392a-4e8d-a84f-8d37349c6eb9"
Dec 10 15:24:56 crc kubenswrapper[4669]: I1210 15:24:56.852198 4669 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:56 crc kubenswrapper[4669]: I1210 15:24:56.852241 4669 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="58930104-b0aa-4955-93ac-98fa4a576fef"
Dec 10 15:24:56 crc kubenswrapper[4669]: I1210 15:24:56.855756 4669 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="1cd055eb-392a-4e8d-a84f-8d37349c6eb9"
Dec 10 15:25:00 crc kubenswrapper[4669]: I1210 15:25:00.276928 4669 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 10 15:25:00 crc kubenswrapper[4669]: I1210 15:25:00.277602 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 10 15:25:05 crc kubenswrapper[4669]: I1210 15:25:05.791037 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Dec 10 15:25:06 crc kubenswrapper[4669]: I1210 15:25:06.576232 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 10 15:25:06 crc kubenswrapper[4669]: I1210 15:25:06.621306 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 10 15:25:06 crc kubenswrapper[4669]: I1210 15:25:06.730528 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 10 15:25:06 crc kubenswrapper[4669]: I1210 15:25:06.773483 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Dec 10 15:25:06 crc kubenswrapper[4669]: I1210 15:25:06.919384 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 10 15:25:06 crc kubenswrapper[4669]: I1210 15:25:06.938977 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle"
Dec 10 15:25:07 crc kubenswrapper[4669]: I1210 15:25:07.006077 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt"
Dec 10 15:25:07 crc kubenswrapper[4669]: I1210 15:25:07.107781 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 10 15:25:07 crc kubenswrapper[4669]: I1210 15:25:07.187045 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Dec 10 15:25:07 crc kubenswrapper[4669]: I1210 15:25:07.339672 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Dec 10 15:25:07 crc kubenswrapper[4669]: I1210 15:25:07.365190 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 10 15:25:07 crc kubenswrapper[4669]: I1210 15:25:07.385409 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 10 15:25:07 crc kubenswrapper[4669]: I1210 15:25:07.706886 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 10 15:25:07 crc kubenswrapper[4669]: I1210 15:25:07.856471 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Dec 10 15:25:07 crc kubenswrapper[4669]: I1210 15:25:07.860405 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca"
Dec 10 15:25:08 crc kubenswrapper[4669]: I1210 15:25:08.097592 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 10 15:25:08 crc kubenswrapper[4669]: I1210 15:25:08.116847 4669 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Dec 10 15:25:08 crc kubenswrapper[4669]: I1210 15:25:08.484111 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1"
Dec 10 15:25:08 crc kubenswrapper[4669]: I1210 15:25:08.557683 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Dec 10 15:25:08 crc kubenswrapper[4669]: I1210 15:25:08.629690 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 10 15:25:08 crc kubenswrapper[4669]: I1210 15:25:08.723399 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 10 15:25:08 crc kubenswrapper[4669]: I1210 15:25:08.855181 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.131493 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.234822 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.289603 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.352477 4669 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.355733 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-t5l5h" podStartSLOduration=43.843441725 podStartE2EDuration="2m18.355718339s" podCreationTimestamp="2025-12-10 15:22:51 +0000 UTC" firstStartedPulling="2025-12-10 15:22:57.983169641 +0000 UTC m=+151.900116268" lastFinishedPulling="2025-12-10 15:24:32.495446255 +0000 UTC m=+246.412392882" observedRunningTime="2025-12-10 15:24:56.273565212 +0000 UTC m=+270.190511869" watchObservedRunningTime="2025-12-10 15:25:09.355718339 +0000 UTC m=+283.272664966"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.355877 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xmtb8" podStartSLOduration=42.540936864 podStartE2EDuration="2m16.355873443s" podCreationTimestamp="2025-12-10 15:22:53 +0000 UTC" firstStartedPulling="2025-12-10 15:23:00.377446418 +0000 UTC m=+154.294393045" lastFinishedPulling="2025-12-10 15:24:34.192382997 +0000 UTC m=+248.109329624" observedRunningTime="2025-12-10 15:24:56.344229492 +0000 UTC m=+270.261176139" watchObservedRunningTime="2025-12-10 15:25:09.355873443 +0000 UTC m=+283.272820060"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.356413 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.356463 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.360393 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.382654 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=17.382632696 podStartE2EDuration="17.382632696s" podCreationTimestamp="2025-12-10 15:24:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:25:09.377360811 +0000 UTC m=+283.294307448" watchObservedRunningTime="2025-12-10 15:25:09.382632696 +0000 UTC m=+283.299579323"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.454420 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.532975 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.600471 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.661548 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.703996 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.748826 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.759280 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.760360 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.794825 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.848833 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Dec 10 15:25:09 crc kubenswrapper[4669]: I1210 15:25:09.929913 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.034505 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.062947 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.129695 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.266902 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.276097 4669 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.276148 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.276193 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.277326 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"15177e5d023ebcbf298cc88874620ef1e93d78772b9a2cf1ff30995b4ee72dab"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.277473 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://15177e5d023ebcbf298cc88874620ef1e93d78772b9a2cf1ff30995b4ee72dab" gracePeriod=30
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.451958 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.471051 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.481166 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.515776 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.596766 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.613601 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.666035 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.692155 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.716357 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.774523 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.949369 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Dec 10 15:25:10 crc kubenswrapper[4669]: I1210 15:25:10.964089 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.019820 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.081002 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.209473 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.230663 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.269734 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.365655 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.467480 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.474040 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.498028 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.557788 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.593123 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.664280 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.697754 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.779471 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.811360 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.910935 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 10 15:25:11 crc kubenswrapper[4669]: I1210 15:25:11.988820 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.011194 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config"
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.021598 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.140043 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.174026 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.261190 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.339054 4669 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.354406 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.407134 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.429320 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Dec 10 15:25:12 crc kubenswrapper[4669]: I1210 15:25:12.441171 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.501027 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.550940 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.616284 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.636940 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.681400 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.688284 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.696553 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.718939 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.720803 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.746992 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.795531 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.832702 4669 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.850528 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.898956 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.900899 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:12.975037 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.035294 4669 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.062630 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.182134 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.264954 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.346043 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.368097 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.432666 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.639786 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.689963 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.775492 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.858247 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:13.927471 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.052340 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.078378 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.136906 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.142666 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.219802 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.250281 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.253298 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.296383 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.312611 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.321782 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.326100 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.450337 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.487429 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.522534 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.550668 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.584706 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.610599 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.651919 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.726849 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.777012 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.808129 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.886018 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.932548 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.961339 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:14.981477 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.347307 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.373880 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.384606 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.427557 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.437100 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.553415 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.566415 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.612286 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.620670 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.623413 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.659701 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.674439 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.692074 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.732223 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.783649 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.803909 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 10 15:25:15 crc kubenswrapper[4669]: I1210 15:25:15.829918 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.041004 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.045556 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.070759 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.185016 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.232312 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.238508 4669
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.395085 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.403370 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.419965 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.442638 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.530233 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.581419 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.670935 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.704922 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.717221 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.754419 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.776456 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.786761 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.829663 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.894884 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.902457 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 10 15:25:16 crc kubenswrapper[4669]: I1210 15:25:16.916178 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.080090 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.134597 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.183743 4669 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.286044 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.306232 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.363088 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.380695 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.383758 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.465348 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.720626 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.736615 4669 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.737269 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://c132be169f117b802d1a1f819e11d5731bd37db395da70b342b0b84da3dc6cf6" gracePeriod=5 Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.750518 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.878711 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 10 15:25:17 crc kubenswrapper[4669]: I1210 15:25:17.921266 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.000600 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.010810 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.162714 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.188741 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.202262 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.232636 4669 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.245198 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.402576 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.417989 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.563966 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.569254 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.651970 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.684464 4669 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.727582 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.774067 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.776163 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.812751 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.933691 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 10 15:25:18 crc kubenswrapper[4669]: I1210 15:25:18.948946 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.084594 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.108592 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.109164 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.137610 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.148699 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.192495 4669 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.221043 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.252240 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.295471 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.547074 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.590484 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.594893 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.695771 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.854378 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.882396 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.972562 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 10 15:25:19 crc kubenswrapper[4669]: I1210 15:25:19.973004 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 10 15:25:20 crc kubenswrapper[4669]: I1210 15:25:20.371750 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 10 15:25:20 crc kubenswrapper[4669]: I1210 15:25:20.472260 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 10 15:25:20 crc kubenswrapper[4669]: I1210 15:25:20.670691 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 10 15:25:20 crc kubenswrapper[4669]: I1210 15:25:20.677627 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 10 15:25:20 crc kubenswrapper[4669]: I1210 15:25:20.799071 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 10 15:25:20 crc kubenswrapper[4669]: I1210 15:25:20.801967 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 10 15:25:20 crc kubenswrapper[4669]: I1210 15:25:20.927168 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 10 15:25:20 crc kubenswrapper[4669]: I1210 15:25:20.974195 4669 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 10 15:25:21 crc kubenswrapper[4669]: I1210 15:25:21.048575 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 10 15:25:21 crc kubenswrapper[4669]: I1210 15:25:21.164264 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 10 15:25:21 crc kubenswrapper[4669]: I1210 15:25:21.173304 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 10 15:25:21 crc kubenswrapper[4669]: I1210 15:25:21.197585 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 10 15:25:21 crc kubenswrapper[4669]: I1210 15:25:21.334087 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 10 15:25:21 crc kubenswrapper[4669]: I1210 15:25:21.520744 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 10 15:25:21 crc kubenswrapper[4669]: I1210 15:25:21.523180 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 10 15:25:21 crc kubenswrapper[4669]: I1210 15:25:21.587248 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 10 15:25:21 crc kubenswrapper[4669]: I1210 15:25:21.688611 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.008191 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h89pm"] Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.008467 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h89pm" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerName="registry-server" containerID="cri-o://d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8" gracePeriod=30 Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.024377 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t5l5h"] Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.024651 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t5l5h" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerName="registry-server" containerID="cri-o://ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0" gracePeriod=30 Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.029896 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.038149 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lb6d9"] Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.038429 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-lb6d9" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerName="registry-server" 
containerID="cri-o://e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361" gracePeriod=30 Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.064249 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qcjfk"] Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.064544 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qcjfk" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="registry-server" containerID="cri-o://dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" gracePeriod=30 Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.072851 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6c97z"] Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.073063 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerName="marketplace-operator" containerID="cri-o://f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136" gracePeriod=30 Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.094199 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ssjx9"] Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.094521 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ssjx9" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerName="registry-server" containerID="cri-o://5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0" gracePeriod=30 Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.099465 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0 is running failed: container process not found" containerID="ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0" cmd=["grpc_health_probe","-addr=:50051"] Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.108918 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0 is running failed: container process not found" containerID="ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0" cmd=["grpc_health_probe","-addr=:50051"] Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.112731 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0 is running failed: container process not found" containerID="ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0" cmd=["grpc_health_probe","-addr=:50051"] Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.112840 4669 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-t5l5h" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" 
containerName="registry-server" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.123012 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnn4r"] Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.123812 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-vnn4r" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerName="registry-server" containerID="cri-o://2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243" gracePeriod=30 Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.130333 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lmf56"] Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.130608 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lmf56" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerName="registry-server" containerID="cri-o://e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25" gracePeriod=30 Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.144420 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xmtb8"] Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.144692 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xmtb8" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" containerName="registry-server" containerID="cri-o://df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42" gracePeriod=30 Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.164139 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.174429 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: EOF, stdout: , stderr: , exit code -1" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" cmd=["grpc_health_probe","-addr=:50051"] Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.175030 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50 is running failed: container process not found" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" cmd=["grpc_health_probe","-addr=:50051"] Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.176356 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50 is running failed: container process not found" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" cmd=["grpc_health_probe","-addr=:50051"] Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.176398 4669 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50 is running failed: container process not found" probeType="Liveness" pod="openshift-marketplace/community-operators-qcjfk" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="registry-server" Dec 10 15:25:22 crc 
kubenswrapper[4669]: E1210 15:25:22.186002 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: EOF, stdout: , stderr: , exit code -1" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" cmd=["grpc_health_probe","-addr=:50051"] Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.189215 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50 is running failed: container process not found" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" cmd=["grpc_health_probe","-addr=:50051"] Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.190019 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50 is running failed: container process not found" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" cmd=["grpc_health_probe","-addr=:50051"] Dec 10 15:25:22 crc kubenswrapper[4669]: E1210 15:25:22.190053 4669 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-qcjfk" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="registry-server" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.347184 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.478684 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.633493 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.637468 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.649734 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.668531 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.704400 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.731952 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-operator-metrics\") pod \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732060 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-utilities\") pod \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732094 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsch4\" (UniqueName: \"kubernetes.io/projected/71b77d7f-f74a-4442-a9df-2c36237983a2-kube-api-access-zsch4\") pod \"71b77d7f-f74a-4442-a9df-2c36237983a2\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732135 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2q2fx\" (UniqueName: \"kubernetes.io/projected/0f3441be-4b11-4f4a-b072-7ca1894c5f86-kube-api-access-2q2fx\") pod \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732155 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-utilities\") pod \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732201 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-catalog-content\") pod \"71b77d7f-f74a-4442-a9df-2c36237983a2\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732407 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-catalog-content\") pod \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732462 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-trusted-ca\") pod \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732488 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-catalog-content\") pod \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\" (UID: \"0f3441be-4b11-4f4a-b072-7ca1894c5f86\") " Dec 10 15:25:22 crc 
kubenswrapper[4669]: I1210 15:25:22.732625 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-utilities\") pod \"71b77d7f-f74a-4442-a9df-2c36237983a2\" (UID: \"71b77d7f-f74a-4442-a9df-2c36237983a2\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732676 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p55pk\" (UniqueName: \"kubernetes.io/projected/8d636e2e-c705-4462-bc33-88f18c5f3aa2-kube-api-access-p55pk\") pod \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\" (UID: \"8d636e2e-c705-4462-bc33-88f18c5f3aa2\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.732708 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbst6\" (UniqueName: \"kubernetes.io/projected/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-kube-api-access-hbst6\") pod \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\" (UID: \"a40f1577-aae9-4e5c-bfdb-21dd1a00445d\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.733567 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-utilities" (OuterVolumeSpecName: "utilities") pod "8d636e2e-c705-4462-bc33-88f18c5f3aa2" (UID: "8d636e2e-c705-4462-bc33-88f18c5f3aa2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.734132 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-utilities" (OuterVolumeSpecName: "utilities") pod "71b77d7f-f74a-4442-a9df-2c36237983a2" (UID: "71b77d7f-f74a-4442-a9df-2c36237983a2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.734715 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-utilities" (OuterVolumeSpecName: "utilities") pod "0f3441be-4b11-4f4a-b072-7ca1894c5f86" (UID: "0f3441be-4b11-4f4a-b072-7ca1894c5f86"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.734790 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "a40f1577-aae9-4e5c-bfdb-21dd1a00445d" (UID: "a40f1577-aae9-4e5c-bfdb-21dd1a00445d"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.761064 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/71b77d7f-f74a-4442-a9df-2c36237983a2-kube-api-access-zsch4" (OuterVolumeSpecName: "kube-api-access-zsch4") pod "71b77d7f-f74a-4442-a9df-2c36237983a2" (UID: "71b77d7f-f74a-4442-a9df-2c36237983a2"). InnerVolumeSpecName "kube-api-access-zsch4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.771850 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d636e2e-c705-4462-bc33-88f18c5f3aa2-kube-api-access-p55pk" (OuterVolumeSpecName: "kube-api-access-p55pk") pod "8d636e2e-c705-4462-bc33-88f18c5f3aa2" (UID: "8d636e2e-c705-4462-bc33-88f18c5f3aa2"). InnerVolumeSpecName "kube-api-access-p55pk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.773273 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f3441be-4b11-4f4a-b072-7ca1894c5f86-kube-api-access-2q2fx" (OuterVolumeSpecName: "kube-api-access-2q2fx") pod "0f3441be-4b11-4f4a-b072-7ca1894c5f86" (UID: "0f3441be-4b11-4f4a-b072-7ca1894c5f86"). InnerVolumeSpecName "kube-api-access-2q2fx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.775909 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-kube-api-access-hbst6" (OuterVolumeSpecName: "kube-api-access-hbst6") pod "a40f1577-aae9-4e5c-bfdb-21dd1a00445d" (UID: "a40f1577-aae9-4e5c-bfdb-21dd1a00445d"). InnerVolumeSpecName "kube-api-access-hbst6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.791892 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "71b77d7f-f74a-4442-a9df-2c36237983a2" (UID: "71b77d7f-f74a-4442-a9df-2c36237983a2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.794030 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "a40f1577-aae9-4e5c-bfdb-21dd1a00445d" (UID: "a40f1577-aae9-4e5c-bfdb-21dd1a00445d"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.808703 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8d636e2e-c705-4462-bc33-88f18c5f3aa2" (UID: "8d636e2e-c705-4462-bc33-88f18c5f3aa2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.818559 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0f3441be-4b11-4f4a-b072-7ca1894c5f86" (UID: "0f3441be-4b11-4f4a-b072-7ca1894c5f86"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.833958 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.833994 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsch4\" (UniqueName: \"kubernetes.io/projected/71b77d7f-f74a-4442-a9df-2c36237983a2-kube-api-access-zsch4\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834006 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2q2fx\" (UniqueName: \"kubernetes.io/projected/0f3441be-4b11-4f4a-b072-7ca1894c5f86-kube-api-access-2q2fx\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834016 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834028 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834039 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8d636e2e-c705-4462-bc33-88f18c5f3aa2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834049 4669 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834058 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0f3441be-4b11-4f4a-b072-7ca1894c5f86-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834068 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/71b77d7f-f74a-4442-a9df-2c36237983a2-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834077 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p55pk\" (UniqueName: \"kubernetes.io/projected/8d636e2e-c705-4462-bc33-88f18c5f3aa2-kube-api-access-p55pk\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834086 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbst6\" (UniqueName: \"kubernetes.io/projected/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-kube-api-access-hbst6\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.834097 4669 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/a40f1577-aae9-4e5c-bfdb-21dd1a00445d-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.901263 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.937012 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcllm\" (UniqueName: \"kubernetes.io/projected/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-kube-api-access-lcllm\") pod \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.937125 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-catalog-content\") pod \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.937187 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-utilities\") pod \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\" (UID: \"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7\") " Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.938514 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-utilities" (OuterVolumeSpecName: "utilities") pod "0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" (UID: "0c2d3166-9bdb-4d65-8c41-676fc90c8bb7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.944635 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-kube-api-access-lcllm" (OuterVolumeSpecName: "kube-api-access-lcllm") pod "0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" (UID: "0c2d3166-9bdb-4d65-8c41-676fc90c8bb7"). InnerVolumeSpecName "kube-api-access-lcllm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.969194 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" (UID: "0c2d3166-9bdb-4d65-8c41-676fc90c8bb7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.975762 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.986793 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:25:22 crc kubenswrapper[4669]: I1210 15:25:22.991290 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.039033 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcllm\" (UniqueName: \"kubernetes.io/projected/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-kube-api-access-lcllm\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.039064 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.039075 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.040287 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.040709 4669 generic.go:334] "Generic (PLEG): container finished" podID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerID="2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243" exitCode=0 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.040763 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnn4r" event={"ID":"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7","Type":"ContainerDied","Data":"2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.040790 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-vnn4r" event={"ID":"0c2d3166-9bdb-4d65-8c41-676fc90c8bb7","Type":"ContainerDied","Data":"34d212d714d5c08d5fc57ff2ba17e43805cf3104a6e2e0a63a86f9204fbc1ef0"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.040806 4669 scope.go:117] "RemoveContainer" containerID="2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.040967 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-vnn4r" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.044099 4669 generic.go:334] "Generic (PLEG): container finished" podID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerID="5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0" exitCode=0 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.044184 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ssjx9" event={"ID":"71b77d7f-f74a-4442-a9df-2c36237983a2","Type":"ContainerDied","Data":"5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.044361 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ssjx9" event={"ID":"71b77d7f-f74a-4442-a9df-2c36237983a2","Type":"ContainerDied","Data":"f941591895d45cc6a6c2aec6e9182bdd1df3bd856bc8ffdb9cdf8d9eacbe9332"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.044394 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ssjx9" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.049830 4669 generic.go:334] "Generic (PLEG): container finished" podID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" exitCode=0 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.049912 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcjfk" event={"ID":"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5","Type":"ContainerDied","Data":"dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.049952 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qcjfk" event={"ID":"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5","Type":"ContainerDied","Data":"762739c17969b30db736f0142ec8b018fb076b32a4a5a0fbfbb1f19c2a090f93"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.050039 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qcjfk" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.055544 4669 generic.go:334] "Generic (PLEG): container finished" podID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerID="e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25" exitCode=0 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.055620 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmf56" event={"ID":"e6b094d9-c376-4f11-8c0e-7764c92d1031","Type":"ContainerDied","Data":"e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.055651 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lmf56" event={"ID":"e6b094d9-c376-4f11-8c0e-7764c92d1031","Type":"ContainerDied","Data":"6224465508cc0cbed9b1ad57b3695be0ebf2bc5c862d46c3aa83c942f0280b60"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.055747 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lmf56" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.069530 4669 generic.go:334] "Generic (PLEG): container finished" podID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerID="ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0" exitCode=0 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.069801 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t5l5h" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.070031 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5l5h" event={"ID":"8d636e2e-c705-4462-bc33-88f18c5f3aa2","Type":"ContainerDied","Data":"ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.070075 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t5l5h" event={"ID":"8d636e2e-c705-4462-bc33-88f18c5f3aa2","Type":"ContainerDied","Data":"7368bbce2e383e68878d2dc55e232ce50aec7a999ed2ef0297cbddab68c34c0e"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.071586 4669 scope.go:117] "RemoveContainer" containerID="a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.075747 4669 generic.go:334] "Generic (PLEG): container finished" podID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerID="e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361" exitCode=0 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.075835 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lb6d9" event={"ID":"99dd4f24-38ac-4110-a330-19ab7710acd9","Type":"ContainerDied","Data":"e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.075872 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-lb6d9" event={"ID":"99dd4f24-38ac-4110-a330-19ab7710acd9","Type":"ContainerDied","Data":"b1b13b5abb3505b4188da76f89fc693a8d2cd6605293280c779dbe5c4a27fe4b"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.075963 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-lb6d9" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.080972 4669 generic.go:334] "Generic (PLEG): container finished" podID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerID="f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136" exitCode=0 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.081066 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" event={"ID":"a40f1577-aae9-4e5c-bfdb-21dd1a00445d","Type":"ContainerDied","Data":"f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.081103 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" event={"ID":"a40f1577-aae9-4e5c-bfdb-21dd1a00445d","Type":"ContainerDied","Data":"7714cb8e90ddd01a659f693ed5e441caa247782f18113218282bda139e3f3684"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.081186 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6c97z" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.088860 4669 generic.go:334] "Generic (PLEG): container finished" podID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerID="d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8" exitCode=0 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.088980 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h89pm" event={"ID":"0f3441be-4b11-4f4a-b072-7ca1894c5f86","Type":"ContainerDied","Data":"d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.089011 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h89pm" event={"ID":"0f3441be-4b11-4f4a-b072-7ca1894c5f86","Type":"ContainerDied","Data":"20c0d8248416e41ba891d2dd034ab3bf39c1525532234ae649b3698a3a0e6eb4"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.089622 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h89pm" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.096737 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.096803 4669 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="c132be169f117b802d1a1f819e11d5731bd37db395da70b342b0b84da3dc6cf6" exitCode=137 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.114502 4669 generic.go:334] "Generic (PLEG): container finished" podID="1654a8ed-45e1-416b-9082-21c947d03a70" containerID="df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42" exitCode=0 Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.114595 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmtb8" event={"ID":"1654a8ed-45e1-416b-9082-21c947d03a70","Type":"ContainerDied","Data":"df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.114659 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xmtb8" event={"ID":"1654a8ed-45e1-416b-9082-21c947d03a70","Type":"ContainerDied","Data":"58d3cf3ba9a6a55159b39500405353e87452e2e3e00b6795ec69f2ebed92c3a1"} Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.114923 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xmtb8" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.115289 4669 scope.go:117] "RemoveContainer" containerID="9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.118932 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnn4r"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.136053 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-vnn4r"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.139961 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ssjx9"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140423 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm6tx\" (UniqueName: \"kubernetes.io/projected/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-kube-api-access-sm6tx\") pod \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140542 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-utilities\") pod \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140680 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-catalog-content\") pod \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\" (UID: \"04c10baa-4c4e-4359-a93e-c76d6f5e1cb5\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140728 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqd76\" (UniqueName: \"kubernetes.io/projected/99dd4f24-38ac-4110-a330-19ab7710acd9-kube-api-access-fqd76\") pod \"99dd4f24-38ac-4110-a330-19ab7710acd9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140754 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cg755\" (UniqueName: \"kubernetes.io/projected/e6b094d9-c376-4f11-8c0e-7764c92d1031-kube-api-access-cg755\") pod \"e6b094d9-c376-4f11-8c0e-7764c92d1031\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140783 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-catalog-content\") pod \"e6b094d9-c376-4f11-8c0e-7764c92d1031\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140815 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-utilities\") pod \"e6b094d9-c376-4f11-8c0e-7764c92d1031\" (UID: \"e6b094d9-c376-4f11-8c0e-7764c92d1031\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140842 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-catalog-content\") pod 
\"99dd4f24-38ac-4110-a330-19ab7710acd9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140866 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-utilities\") pod \"99dd4f24-38ac-4110-a330-19ab7710acd9\" (UID: \"99dd4f24-38ac-4110-a330-19ab7710acd9\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.140915 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-catalog-content\") pod \"1654a8ed-45e1-416b-9082-21c947d03a70\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.141325 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-utilities" (OuterVolumeSpecName: "utilities") pod "04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" (UID: "04c10baa-4c4e-4359-a93e-c76d6f5e1cb5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.148251 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-kube-api-access-sm6tx" (OuterVolumeSpecName: "kube-api-access-sm6tx") pod "04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" (UID: "04c10baa-4c4e-4359-a93e-c76d6f5e1cb5"). InnerVolumeSpecName "kube-api-access-sm6tx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.151261 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-utilities" (OuterVolumeSpecName: "utilities") pod "99dd4f24-38ac-4110-a330-19ab7710acd9" (UID: "99dd4f24-38ac-4110-a330-19ab7710acd9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.156652 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-utilities" (OuterVolumeSpecName: "utilities") pod "e6b094d9-c376-4f11-8c0e-7764c92d1031" (UID: "e6b094d9-c376-4f11-8c0e-7764c92d1031"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.158908 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99dd4f24-38ac-4110-a330-19ab7710acd9-kube-api-access-fqd76" (OuterVolumeSpecName: "kube-api-access-fqd76") pod "99dd4f24-38ac-4110-a330-19ab7710acd9" (UID: "99dd4f24-38ac-4110-a330-19ab7710acd9"). InnerVolumeSpecName "kube-api-access-fqd76". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.159468 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6b094d9-c376-4f11-8c0e-7764c92d1031-kube-api-access-cg755" (OuterVolumeSpecName: "kube-api-access-cg755") pod "e6b094d9-c376-4f11-8c0e-7764c92d1031" (UID: "e6b094d9-c376-4f11-8c0e-7764c92d1031"). InnerVolumeSpecName "kube-api-access-cg755". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.169330 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ssjx9"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.192662 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6c97z"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.199051 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6c97z"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.204012 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t5l5h"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.214341 4669 scope.go:117] "RemoveContainer" containerID="2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.215395 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243\": container with ID starting with 2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243 not found: ID does not exist" containerID="2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.215471 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243"} err="failed to get container status \"2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243\": rpc error: code = NotFound desc = could not find container \"2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243\": container with ID starting with 2a09547795328fce72f303cea553966de56eb42a1dc437cfcf5158819f633243 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.215519 4669 scope.go:117] "RemoveContainer" containerID="a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.217517 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3\": container with ID starting with a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3 not found: ID does not exist" containerID="a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.217589 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3"} err="failed to get container status \"a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3\": rpc error: code = NotFound desc = could not find container \"a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3\": container with ID starting with a9abf9b6c5c794414c436a80a6e0ec2f5fda8b1563e9da52b336518d399bbbb3 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.217643 4669 scope.go:117] "RemoveContainer" containerID="9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.226732 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openshift-marketplace/certified-operators-t5l5h"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.235151 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h89pm"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.239516 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h89pm"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.241812 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkpq8\" (UniqueName: \"kubernetes.io/projected/1654a8ed-45e1-416b-9082-21c947d03a70-kube-api-access-qkpq8\") pod \"1654a8ed-45e1-416b-9082-21c947d03a70\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.241900 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-utilities\") pod \"1654a8ed-45e1-416b-9082-21c947d03a70\" (UID: \"1654a8ed-45e1-416b-9082-21c947d03a70\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.242295 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm6tx\" (UniqueName: \"kubernetes.io/projected/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-kube-api-access-sm6tx\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.242321 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.242332 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqd76\" (UniqueName: \"kubernetes.io/projected/99dd4f24-38ac-4110-a330-19ab7710acd9-kube-api-access-fqd76\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.242343 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cg755\" (UniqueName: \"kubernetes.io/projected/e6b094d9-c376-4f11-8c0e-7764c92d1031-kube-api-access-cg755\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.242355 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.242364 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.242466 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e\": container with ID starting with 9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e not found: ID does not exist" containerID="9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.242496 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e"} err="failed to get container status \"9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e\": rpc error: code = 
NotFound desc = could not find container \"9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e\": container with ID starting with 9fd5475828ab936f11e42ce8823485633318938404b538e7edacad34d615b71e not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.242527 4669 scope.go:117] "RemoveContainer" containerID="5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.245425 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-utilities" (OuterVolumeSpecName: "utilities") pod "1654a8ed-45e1-416b-9082-21c947d03a70" (UID: "1654a8ed-45e1-416b-9082-21c947d03a70"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.249687 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1654a8ed-45e1-416b-9082-21c947d03a70-kube-api-access-qkpq8" (OuterVolumeSpecName: "kube-api-access-qkpq8") pod "1654a8ed-45e1-416b-9082-21c947d03a70" (UID: "1654a8ed-45e1-416b-9082-21c947d03a70"). InnerVolumeSpecName "kube-api-access-qkpq8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.258140 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99dd4f24-38ac-4110-a330-19ab7710acd9" (UID: "99dd4f24-38ac-4110-a330-19ab7710acd9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.271213 4669 scope.go:117] "RemoveContainer" containerID="edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.273870 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" (UID: "04c10baa-4c4e-4359-a93e-c76d6f5e1cb5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.310766 4669 scope.go:117] "RemoveContainer" containerID="89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.324594 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.325096 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.328746 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e6b094d9-c376-4f11-8c0e-7764c92d1031" (UID: "e6b094d9-c376-4f11-8c0e-7764c92d1031"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.329669 4669 scope.go:117] "RemoveContainer" containerID="5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.330937 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0\": container with ID starting with 5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0 not found: ID does not exist" containerID="5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.331049 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0"} err="failed to get container status \"5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0\": rpc error: code = NotFound desc = could not find container \"5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0\": container with ID starting with 5fd5d990444bc61f8da7c1aade14c365dac44e0053a95dc2342c21dec2d5ecc0 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.331091 4669 scope.go:117] "RemoveContainer" containerID="edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.331921 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e\": container with ID starting with edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e not found: ID does not exist" containerID="edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.331960 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e"} err="failed to get container status \"edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e\": rpc error: code = NotFound desc = could not find container \"edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e\": container with ID starting with edf99592b3e9ac7e54f11fc826cec2b5ae9133e4d209041098a8c2380b6fa76e not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.331985 4669 scope.go:117] "RemoveContainer" containerID="89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.334872 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd\": container with ID starting with 89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd not found: ID does not exist" containerID="89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.334924 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd"} err="failed to get container status \"89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd\": rpc error: code = NotFound desc = could not 
find container \"89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd\": container with ID starting with 89a94723eb485ea146527439aacc94ec9663beba7f6adaf07a77649e1e7f10bd not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.334956 4669 scope.go:117] "RemoveContainer" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.343412 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99dd4f24-38ac-4110-a330-19ab7710acd9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.343491 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkpq8\" (UniqueName: \"kubernetes.io/projected/1654a8ed-45e1-416b-9082-21c947d03a70-kube-api-access-qkpq8\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.343507 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.343517 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.343528 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e6b094d9-c376-4f11-8c0e-7764c92d1031-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.349493 4669 scope.go:117] "RemoveContainer" containerID="742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.350078 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1654a8ed-45e1-416b-9082-21c947d03a70" (UID: "1654a8ed-45e1-416b-9082-21c947d03a70"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.364961 4669 scope.go:117] "RemoveContainer" containerID="d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.388963 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qcjfk"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.394173 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qcjfk"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.400583 4669 scope.go:117] "RemoveContainer" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.401139 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50\": container with ID starting with dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50 not found: ID does not exist" containerID="dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.401334 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50"} err="failed to get container status \"dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50\": rpc error: code = NotFound desc = could not find container \"dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50\": container with ID starting with dbbbe1641e7abe0265fa959b35efa8a12f64bcf35df3d937614cd742ba1c6a50 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.401458 4669 scope.go:117] "RemoveContainer" containerID="742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.401694 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lmf56"] Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.402097 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64\": container with ID starting with 742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64 not found: ID does not exist" containerID="742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.402230 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64"} err="failed to get container status \"742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64\": rpc error: code = NotFound desc = could not find container \"742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64\": container with ID starting with 742b61826d35e0a8dad55f6601690c254267759c34f31c1f547bab5430ff6e64 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.402339 4669 scope.go:117] "RemoveContainer" containerID="d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.402694 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = 
NotFound desc = could not find container \"d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0\": container with ID starting with d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0 not found: ID does not exist" containerID="d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.402801 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0"} err="failed to get container status \"d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0\": rpc error: code = NotFound desc = could not find container \"d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0\": container with ID starting with d6a43eec83d588747fca99af3e70757faa894241f312977be893efcbfdf2e2d0 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.402917 4669 scope.go:117] "RemoveContainer" containerID="e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.406078 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lmf56"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.428752 4669 scope.go:117] "RemoveContainer" containerID="5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.434078 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-lb6d9"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.438651 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-lb6d9"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.444366 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.444556 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.444966 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.445006 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.445031 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.445052 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.445283 4669 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.445301 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1654a8ed-45e1-416b-9082-21c947d03a70-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.446566 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.446607 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.446857 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.455396 4669 scope.go:117] "RemoveContainer" containerID="de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.456597 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.467974 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xmtb8"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.472611 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xmtb8"] Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.473361 4669 scope.go:117] "RemoveContainer" containerID="e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.473974 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25\": container with ID starting with e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25 not found: ID does not exist" containerID="e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.474053 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25"} err="failed to get container status \"e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25\": rpc error: code = NotFound desc = could not find container \"e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25\": container with ID starting with e7caff780ee300c046e8f86685ae19ff3f40baa6a5d811e102988e1770533f25 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.474113 4669 scope.go:117] "RemoveContainer" containerID="5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.474755 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9\": container with ID starting with 5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9 not found: ID does not exist" containerID="5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.474800 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9"} err="failed to get container status \"5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9\": rpc error: code = NotFound desc = could not find container \"5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9\": container with ID starting with 5c3cd6e579a83908aeb043a2207ecc6df5b31e0edde06c027328b26b607a3ac9 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.474828 4669 
scope.go:117] "RemoveContainer" containerID="de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.475341 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f\": container with ID starting with de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f not found: ID does not exist" containerID="de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.475389 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f"} err="failed to get container status \"de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f\": rpc error: code = NotFound desc = could not find container \"de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f\": container with ID starting with de95143b57a3c8b199a66d1a0d4fb5e17a0dd5d4f18a35fa880571bfd97e598f not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.475425 4669 scope.go:117] "RemoveContainer" containerID="ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.491082 4669 scope.go:117] "RemoveContainer" containerID="6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.508051 4669 scope.go:117] "RemoveContainer" containerID="e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.524975 4669 scope.go:117] "RemoveContainer" containerID="ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.525746 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0\": container with ID starting with ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0 not found: ID does not exist" containerID="ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.525804 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0"} err="failed to get container status \"ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0\": rpc error: code = NotFound desc = could not find container \"ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0\": container with ID starting with ebae0bcb5a0629c6b309afc0b6d3cd44adcefc74a868ea32c2e8507537f2f3a0 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.525842 4669 scope.go:117] "RemoveContainer" containerID="6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.526656 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189\": container with ID starting with 6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189 not found: ID does not exist" 
containerID="6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.526870 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189"} err="failed to get container status \"6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189\": rpc error: code = NotFound desc = could not find container \"6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189\": container with ID starting with 6ce0839a85e68afeb297ab2fd13de22297e8bf389ab044309bf8c670fea43189 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.527100 4669 scope.go:117] "RemoveContainer" containerID="e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.527782 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796\": container with ID starting with e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796 not found: ID does not exist" containerID="e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.527845 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796"} err="failed to get container status \"e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796\": rpc error: code = NotFound desc = could not find container \"e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796\": container with ID starting with e47cb2f04357fd60b94bec3204a66dd28341d5879e73da14338448b7b0f5d796 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.527875 4669 scope.go:117] "RemoveContainer" containerID="e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.545358 4669 scope.go:117] "RemoveContainer" containerID="c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.546752 4669 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.546901 4669 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.547000 4669 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.547087 4669 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.568205 4669 scope.go:117] "RemoveContainer" containerID="054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.588794 4669 
scope.go:117] "RemoveContainer" containerID="e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.589525 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361\": container with ID starting with e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361 not found: ID does not exist" containerID="e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.589567 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361"} err="failed to get container status \"e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361\": rpc error: code = NotFound desc = could not find container \"e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361\": container with ID starting with e7642cd167441fb14910f3ccc4a4a696012796bb7d5fe046d5586212b5270361 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.589599 4669 scope.go:117] "RemoveContainer" containerID="c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.590108 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05\": container with ID starting with c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05 not found: ID does not exist" containerID="c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.590132 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05"} err="failed to get container status \"c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05\": rpc error: code = NotFound desc = could not find container \"c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05\": container with ID starting with c226d7327c429d9fbb213ad492fc340e204d64fa3ae1015e20f223adbfe6ea05 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.590147 4669 scope.go:117] "RemoveContainer" containerID="054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.590636 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8\": container with ID starting with 054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8 not found: ID does not exist" containerID="054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.590696 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8"} err="failed to get container status \"054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8\": rpc error: code = NotFound desc = could not find container \"054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8\": container with ID starting with 
054936fb378c7fd313f07e54bac8c9de1e0d96a08baa04a12e8c4a593bbf0ed8 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.590722 4669 scope.go:117] "RemoveContainer" containerID="f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.608384 4669 scope.go:117] "RemoveContainer" containerID="f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.609066 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136\": container with ID starting with f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136 not found: ID does not exist" containerID="f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.609157 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136"} err="failed to get container status \"f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136\": rpc error: code = NotFound desc = could not find container \"f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136\": container with ID starting with f15361da1c8113b32f651e081d6bd80b9c80d438c54b38a7e2dbaba6d41cf136 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.609247 4669 scope.go:117] "RemoveContainer" containerID="d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.624790 4669 scope.go:117] "RemoveContainer" containerID="6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.649966 4669 scope.go:117] "RemoveContainer" containerID="a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.666148 4669 scope.go:117] "RemoveContainer" containerID="d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.666849 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8\": container with ID starting with d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8 not found: ID does not exist" containerID="d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.666912 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8"} err="failed to get container status \"d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8\": rpc error: code = NotFound desc = could not find container \"d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8\": container with ID starting with d8f8935b1f2405de9c82e7376c61529191311b0441baa51d037709f6cdd882b8 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.666964 4669 scope.go:117] "RemoveContainer" containerID="6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.667670 4669 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e\": container with ID starting with 6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e not found: ID does not exist" containerID="6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.667746 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e"} err="failed to get container status \"6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e\": rpc error: code = NotFound desc = could not find container \"6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e\": container with ID starting with 6ba0f00494e4042e02f7c0fbc9112b05221178173a05188a52e73b1a9b23e82e not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.667794 4669 scope.go:117] "RemoveContainer" containerID="a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.668529 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86\": container with ID starting with a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86 not found: ID does not exist" containerID="a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.668586 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86"} err="failed to get container status \"a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86\": rpc error: code = NotFound desc = could not find container \"a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86\": container with ID starting with a90f80d4f5db58e340e9db0358ee4473b92e42a42284c066e5592d301e14cc86 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.668627 4669 scope.go:117] "RemoveContainer" containerID="df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.686166 4669 scope.go:117] "RemoveContainer" containerID="19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.703484 4669 scope.go:117] "RemoveContainer" containerID="e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.721627 4669 scope.go:117] "RemoveContainer" containerID="df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.722547 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42\": container with ID starting with df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42 not found: ID does not exist" containerID="df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.722612 4669 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42"} err="failed to get container status \"df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42\": rpc error: code = NotFound desc = could not find container \"df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42\": container with ID starting with df7b4903fa602014b6ac0f6f9c6d6771fd9a2e5a12976c929a03159e8b3d6e42 not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.722672 4669 scope.go:117] "RemoveContainer" containerID="19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.723500 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d\": container with ID starting with 19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d not found: ID does not exist" containerID="19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.723546 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d"} err="failed to get container status \"19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d\": rpc error: code = NotFound desc = could not find container \"19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d\": container with ID starting with 19966c13ecb917fffdb60ca45e5dc0d55d7a2175d4905667e5be418fa3aef87d not found: ID does not exist" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.723565 4669 scope.go:117] "RemoveContainer" containerID="e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89" Dec 10 15:25:23 crc kubenswrapper[4669]: E1210 15:25:23.724037 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89\": container with ID starting with e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89 not found: ID does not exist" containerID="e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89" Dec 10 15:25:23 crc kubenswrapper[4669]: I1210 15:25:23.724133 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89"} err="failed to get container status \"e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89\": rpc error: code = NotFound desc = could not find container \"e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89\": container with ID starting with e6ac459304df19965a7b4eec6cfaf975e515de0ef9b746cce586ea3fe134ae89 not found: ID does not exist" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.146552 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.146921 4669 scope.go:117] "RemoveContainer" containerID="c132be169f117b802d1a1f819e11d5731bd37db395da70b342b0b84da3dc6cf6" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.146987 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.404468 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" path="/var/lib/kubelet/pods/04c10baa-4c4e-4359-a93e-c76d6f5e1cb5/volumes" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.405449 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" path="/var/lib/kubelet/pods/0c2d3166-9bdb-4d65-8c41-676fc90c8bb7/volumes" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.406002 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" path="/var/lib/kubelet/pods/0f3441be-4b11-4f4a-b072-7ca1894c5f86/volumes" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.406599 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" path="/var/lib/kubelet/pods/1654a8ed-45e1-416b-9082-21c947d03a70/volumes" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.407173 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" path="/var/lib/kubelet/pods/71b77d7f-f74a-4442-a9df-2c36237983a2/volumes" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.407785 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" path="/var/lib/kubelet/pods/8d636e2e-c705-4462-bc33-88f18c5f3aa2/volumes" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.408368 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" path="/var/lib/kubelet/pods/99dd4f24-38ac-4110-a330-19ab7710acd9/volumes" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.410166 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" path="/var/lib/kubelet/pods/a40f1577-aae9-4e5c-bfdb-21dd1a00445d/volumes" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.410612 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" path="/var/lib/kubelet/pods/e6b094d9-c376-4f11-8c0e-7764c92d1031/volumes" Dec 10 15:25:24 crc kubenswrapper[4669]: I1210 15:25:24.411056 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 10 15:25:40 crc kubenswrapper[4669]: I1210 15:25:40.672605 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 10 15:25:41 crc kubenswrapper[4669]: I1210 15:25:41.267298 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 10 15:25:41 crc kubenswrapper[4669]: I1210 15:25:41.270327 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 10 15:25:41 crc kubenswrapper[4669]: I1210 15:25:41.270391 4669 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="15177e5d023ebcbf298cc88874620ef1e93d78772b9a2cf1ff30995b4ee72dab" exitCode=137 Dec 10 15:25:41 crc 
kubenswrapper[4669]: I1210 15:25:41.270425 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"15177e5d023ebcbf298cc88874620ef1e93d78772b9a2cf1ff30995b4ee72dab"} Dec 10 15:25:41 crc kubenswrapper[4669]: I1210 15:25:41.270475 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d3473a1ba41adabd2fc0c197c0eea18e722f5e0779e43d38506911d11b810d75"} Dec 10 15:25:41 crc kubenswrapper[4669]: I1210 15:25:41.270492 4669 scope.go:117] "RemoveContainer" containerID="647896b479ba771c182a7e06076a4a8cce7e4fc63f7ac3235df70700c1caa4f5" Dec 10 15:25:41 crc kubenswrapper[4669]: I1210 15:25:41.736156 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 10 15:25:42 crc kubenswrapper[4669]: I1210 15:25:42.278901 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 10 15:25:43 crc kubenswrapper[4669]: I1210 15:25:43.983635 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:25:45 crc kubenswrapper[4669]: I1210 15:25:45.475891 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 10 15:25:45 crc kubenswrapper[4669]: I1210 15:25:45.632390 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 10 15:25:46 crc kubenswrapper[4669]: I1210 15:25:46.400261 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 10 15:25:48 crc kubenswrapper[4669]: I1210 15:25:48.528073 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 10 15:25:49 crc kubenswrapper[4669]: I1210 15:25:49.160134 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 10 15:25:50 crc kubenswrapper[4669]: I1210 15:25:50.276203 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:25:50 crc kubenswrapper[4669]: I1210 15:25:50.280556 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:25:52 crc kubenswrapper[4669]: I1210 15:25:52.789188 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 10 15:25:53 crc kubenswrapper[4669]: I1210 15:25:53.988466 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.407587 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-szgr8"] Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408066 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" 
containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408079 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408093 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408099 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408109 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408115 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408125 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408132 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408143 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408149 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408156 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408163 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408171 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408177 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408189 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408195 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408202 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408208 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408239 4669 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408245 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408253 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408260 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408267 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408273 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408283 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408288 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408493 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408498 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408508 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408513 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408520 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408526 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408532 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408553 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408566 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" containerName="installer" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408572 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" containerName="installer" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408580 4669 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408585 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408592 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408597 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408603 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408611 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408618 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408623 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408634 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerName="marketplace-operator" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408640 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerName="marketplace-operator" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408647 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408653 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408660 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408665 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408672 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408677 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerName="extract-content" Dec 10 15:25:58 crc kubenswrapper[4669]: E1210 15:25:58.408685 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408690 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerName="extract-utilities" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408782 4669 
memory_manager.go:354] "RemoveStaleState removing state" podUID="99dd4f24-38ac-4110-a330-19ab7710acd9" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408792 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6b094d9-c376-4f11-8c0e-7764c92d1031" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408803 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="71b77d7f-f74a-4442-a9df-2c36237983a2" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408810 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408819 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d636e2e-c705-4462-bc33-88f18c5f3aa2" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408827 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="1654a8ed-45e1-416b-9082-21c947d03a70" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408835 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="0c2d3166-9bdb-4d65-8c41-676fc90c8bb7" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408843 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f3441be-4b11-4f4a-b072-7ca1894c5f86" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408851 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="a40f1577-aae9-4e5c-bfdb-21dd1a00445d" containerName="marketplace-operator" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408859 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="28c45c59-6d2c-4162-9dd0-40cef9280420" containerName="installer" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.408866 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="04c10baa-4c4e-4359-a93e-c76d6f5e1cb5" containerName="registry-server" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.409280 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.410910 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7j6zx"] Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.411259 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" podUID="7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" containerName="controller-manager" containerID="cri-o://616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837" gracePeriod=30 Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.413569 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"] Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.413844 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" podUID="14ff27a3-e946-4b7c-a56c-d7da016d86df" containerName="route-controller-manager" containerID="cri-o://f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9" gracePeriod=30 Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.415363 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.419745 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.420260 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.421109 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.429448 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.439860 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-szgr8"] Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.514655 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ea027979-8f03-4b1d-862f-a7b7e72155c1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-szgr8\" (UID: \"ea027979-8f03-4b1d-862f-a7b7e72155c1\") " pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.514732 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dmb7r\" (UniqueName: \"kubernetes.io/projected/ea027979-8f03-4b1d-862f-a7b7e72155c1-kube-api-access-dmb7r\") pod \"marketplace-operator-79b997595-szgr8\" (UID: \"ea027979-8f03-4b1d-862f-a7b7e72155c1\") " pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.514795 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/ea027979-8f03-4b1d-862f-a7b7e72155c1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-szgr8\" (UID: \"ea027979-8f03-4b1d-862f-a7b7e72155c1\") " pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.616541 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ea027979-8f03-4b1d-862f-a7b7e72155c1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-szgr8\" (UID: \"ea027979-8f03-4b1d-862f-a7b7e72155c1\") " pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.616636 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dmb7r\" (UniqueName: \"kubernetes.io/projected/ea027979-8f03-4b1d-862f-a7b7e72155c1-kube-api-access-dmb7r\") pod \"marketplace-operator-79b997595-szgr8\" (UID: \"ea027979-8f03-4b1d-862f-a7b7e72155c1\") " pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.616682 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea027979-8f03-4b1d-862f-a7b7e72155c1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-szgr8\" (UID: \"ea027979-8f03-4b1d-862f-a7b7e72155c1\") " pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.618560 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ea027979-8f03-4b1d-862f-a7b7e72155c1-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-szgr8\" (UID: \"ea027979-8f03-4b1d-862f-a7b7e72155c1\") " pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.626677 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/ea027979-8f03-4b1d-862f-a7b7e72155c1-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-szgr8\" (UID: \"ea027979-8f03-4b1d-862f-a7b7e72155c1\") " pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.658708 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dmb7r\" (UniqueName: \"kubernetes.io/projected/ea027979-8f03-4b1d-862f-a7b7e72155c1-kube-api-access-dmb7r\") pod \"marketplace-operator-79b997595-szgr8\" (UID: \"ea027979-8f03-4b1d-862f-a7b7e72155c1\") " pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.739668 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.859924 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rv774"] Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.860717 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.891516 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rv774"] Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.904875 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:25:58 crc kubenswrapper[4669]: I1210 15:25:58.984573 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024427 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-client-ca\") pod \"14ff27a3-e946-4b7c-a56c-d7da016d86df\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024489 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-config\") pod \"14ff27a3-e946-4b7c-a56c-d7da016d86df\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024525 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14ff27a3-e946-4b7c-a56c-d7da016d86df-serving-cert\") pod \"14ff27a3-e946-4b7c-a56c-d7da016d86df\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024552 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpqwc\" (UniqueName: \"kubernetes.io/projected/14ff27a3-e946-4b7c-a56c-d7da016d86df-kube-api-access-qpqwc\") pod \"14ff27a3-e946-4b7c-a56c-d7da016d86df\" (UID: \"14ff27a3-e946-4b7c-a56c-d7da016d86df\") " Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024756 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-bound-sa-token\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024814 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-registry-tls\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024838 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024855 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-registry-certificates\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024877 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024917 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024938 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-trusted-ca\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.024954 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdzl5\" (UniqueName: \"kubernetes.io/projected/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-kube-api-access-cdzl5\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.025957 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-client-ca" (OuterVolumeSpecName: "client-ca") pod "14ff27a3-e946-4b7c-a56c-d7da016d86df" (UID: "14ff27a3-e946-4b7c-a56c-d7da016d86df"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.026031 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-config" (OuterVolumeSpecName: "config") pod "14ff27a3-e946-4b7c-a56c-d7da016d86df" (UID: "14ff27a3-e946-4b7c-a56c-d7da016d86df"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.046936 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14ff27a3-e946-4b7c-a56c-d7da016d86df-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "14ff27a3-e946-4b7c-a56c-d7da016d86df" (UID: "14ff27a3-e946-4b7c-a56c-d7da016d86df"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.047161 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14ff27a3-e946-4b7c-a56c-d7da016d86df-kube-api-access-qpqwc" (OuterVolumeSpecName: "kube-api-access-qpqwc") pod "14ff27a3-e946-4b7c-a56c-d7da016d86df" (UID: "14ff27a3-e946-4b7c-a56c-d7da016d86df"). InnerVolumeSpecName "kube-api-access-qpqwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.089698 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126017 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-proxy-ca-bundles\") pod \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126074 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m5csv\" (UniqueName: \"kubernetes.io/projected/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-kube-api-access-m5csv\") pod \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126101 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-client-ca\") pod \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126145 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-serving-cert\") pod \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126282 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-config\") pod \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\" (UID: \"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b\") " Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126414 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-trusted-ca\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126440 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdzl5\" (UniqueName: \"kubernetes.io/projected/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-kube-api-access-cdzl5\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 
15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126478 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-bound-sa-token\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126507 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-registry-tls\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126533 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126549 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-registry-certificates\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126568 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126624 4669 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126636 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/14ff27a3-e946-4b7c-a56c-d7da016d86df-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126646 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/14ff27a3-e946-4b7c-a56c-d7da016d86df-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.126654 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpqwc\" (UniqueName: \"kubernetes.io/projected/14ff27a3-e946-4b7c-a56c-d7da016d86df-kube-api-access-qpqwc\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.127258 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" (UID: "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.128130 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-client-ca" (OuterVolumeSpecName: "client-ca") pod "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" (UID: "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.128850 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-config" (OuterVolumeSpecName: "config") pod "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" (UID: "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.129447 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.129910 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.130310 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-trusted-ca\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.130453 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-registry-certificates\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.133101 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-registry-tls\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.136592 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-kube-api-access-m5csv" (OuterVolumeSpecName: "kube-api-access-m5csv") pod "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" (UID: "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b"). InnerVolumeSpecName "kube-api-access-m5csv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.137980 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" (UID: "7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.145612 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdzl5\" (UniqueName: \"kubernetes.io/projected/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-kube-api-access-cdzl5\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.152832 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d15aa43e-1703-4ea2-8214-eb9ef68e47ee-bound-sa-token\") pod \"image-registry-66df7c8f76-rv774\" (UID: \"d15aa43e-1703-4ea2-8214-eb9ef68e47ee\") " pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.202550 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.227481 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.227522 4669 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.227534 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m5csv\" (UniqueName: \"kubernetes.io/projected/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-kube-api-access-m5csv\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.227545 4669 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.227553 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.395296 4669 generic.go:334] "Generic (PLEG): container finished" podID="7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" containerID="616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837" exitCode=0 Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.395373 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" event={"ID":"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b","Type":"ContainerDied","Data":"616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837"} Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.395423 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" event={"ID":"7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b","Type":"ContainerDied","Data":"3abcb0d7e34f39b8b8ebcde1a82e8c38de75d23c35ef2de5130e6302519b01c1"} Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.395444 4669 scope.go:117] "RemoveContainer" containerID="616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.395678 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7j6zx" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.400719 4669 generic.go:334] "Generic (PLEG): container finished" podID="14ff27a3-e946-4b7c-a56c-d7da016d86df" containerID="f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9" exitCode=0 Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.400794 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.400811 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" event={"ID":"14ff27a3-e946-4b7c-a56c-d7da016d86df","Type":"ContainerDied","Data":"f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9"} Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.400907 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m" event={"ID":"14ff27a3-e946-4b7c-a56c-d7da016d86df","Type":"ContainerDied","Data":"d5c76533392942c2943852799d8fef766153fe4405898523d0800afaea3b10eb"} Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.415795 4669 scope.go:117] "RemoveContainer" containerID="616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837" Dec 10 15:25:59 crc kubenswrapper[4669]: E1210 15:25:59.416278 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837\": container with ID starting with 616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837 not found: ID does not exist" containerID="616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.416308 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837"} err="failed to get container status \"616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837\": rpc error: code = NotFound desc = could not find container \"616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837\": container with ID starting with 616c4b24f0a6c7893572b53543bf308962bcafb4384bac3458738ef500ca1837 not found: ID does not exist" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.416329 4669 scope.go:117] "RemoveContainer" containerID="f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.429105 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-szgr8"] Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.461268 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"] Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.467367 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-r2g6m"] Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.467542 4669 scope.go:117] "RemoveContainer" containerID="f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9" Dec 10 15:25:59 crc kubenswrapper[4669]: E1210 15:25:59.468708 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9\": container with ID starting with f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9 not found: ID does not exist" containerID="f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.468752 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9"} err="failed to get container status \"f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9\": rpc error: code = NotFound desc = could not find container \"f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9\": container with ID starting with f0ce28d25c92a31f9bd744a5b9d2aa068b5923bf6775a52a1113446e637ef1c9 not found: ID does not exist" Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.478443 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7j6zx"] Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.497047 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7j6zx"] Dec 10 15:25:59 crc kubenswrapper[4669]: I1210 15:25:59.517603 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rv774"] Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.404422 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14ff27a3-e946-4b7c-a56c-d7da016d86df" path="/var/lib/kubelet/pods/14ff27a3-e946-4b7c-a56c-d7da016d86df/volumes" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.405147 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" path="/var/lib/kubelet/pods/7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b/volumes" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.407498 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" event={"ID":"ea027979-8f03-4b1d-862f-a7b7e72155c1","Type":"ContainerStarted","Data":"4b6bc95672a877b7921017963cce81886a84ad821408a4ae3ef7602d81b0b6e9"} Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.407571 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" event={"ID":"ea027979-8f03-4b1d-862f-a7b7e72155c1","Type":"ContainerStarted","Data":"1d4b9366bb48ab287de201514e4a16b3d95d083ec8797be557093d028d3e11c5"} Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.407679 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.410452 4669 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.412361 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-rv774" event={"ID":"d15aa43e-1703-4ea2-8214-eb9ef68e47ee","Type":"ContainerStarted","Data":"0ca373e5d12c2b02b558c4bdf621e0ee92e1a0e16dc67d69c301eed6480ac1e2"} Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.412389 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-rv774" event={"ID":"d15aa43e-1703-4ea2-8214-eb9ef68e47ee","Type":"ContainerStarted","Data":"02c5014fbaf20089fb923b57ed54208ca22e578689150edbeb92a4786182bc65"} Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.412672 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.427287 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-szgr8" podStartSLOduration=2.427267539 podStartE2EDuration="2.427267539s" podCreationTimestamp="2025-12-10 15:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:26:00.425245827 +0000 UTC m=+334.342192454" watchObservedRunningTime="2025-12-10 15:26:00.427267539 +0000 UTC m=+334.344214166" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.455716 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w"] Dec 10 15:26:00 crc kubenswrapper[4669]: E1210 15:26:00.456199 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" containerName="controller-manager" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.456224 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" containerName="controller-manager" Dec 10 15:26:00 crc kubenswrapper[4669]: E1210 15:26:00.456238 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14ff27a3-e946-4b7c-a56c-d7da016d86df" containerName="route-controller-manager" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.456244 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="14ff27a3-e946-4b7c-a56c-d7da016d86df" containerName="route-controller-manager" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.456342 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d1da0c8-1cd6-4fba-bd4d-127a01cfe26b" containerName="controller-manager" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.456361 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="14ff27a3-e946-4b7c-a56c-d7da016d86df" containerName="route-controller-manager" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.456684 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.459695 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.463643 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq"] Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.464493 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.464735 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.464891 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.465741 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.466288 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.466517 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.469436 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.469617 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.469828 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.470966 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.474068 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.474959 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.475550 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.489239 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-rv774" podStartSLOduration=2.489206723 podStartE2EDuration="2.489206723s" podCreationTimestamp="2025-12-10 15:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:26:00.475288063 +0000 UTC m=+334.392234690" watchObservedRunningTime="2025-12-10 15:26:00.489206723 +0000 UTC m=+334.406153350" Dec 10 
15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.490317 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w"] Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.558113 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq"] Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.565143 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/abf813d9-b35d-46e8-bd23-9fe112c2370e-proxy-ca-bundles\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.565270 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-config\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.565302 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52f05ad6-cb54-460f-b6b6-36b41e75fa85-serving-cert\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.565336 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-client-ca\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.565363 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmxkx\" (UniqueName: \"kubernetes.io/projected/abf813d9-b35d-46e8-bd23-9fe112c2370e-kube-api-access-wmxkx\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.565383 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abf813d9-b35d-46e8-bd23-9fe112c2370e-serving-cert\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.565404 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znkk5\" (UniqueName: \"kubernetes.io/projected/52f05ad6-cb54-460f-b6b6-36b41e75fa85-kube-api-access-znkk5\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 
crc kubenswrapper[4669]: I1210 15:26:00.565422 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abf813d9-b35d-46e8-bd23-9fe112c2370e-config\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.565450 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/abf813d9-b35d-46e8-bd23-9fe112c2370e-client-ca\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.666677 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abf813d9-b35d-46e8-bd23-9fe112c2370e-serving-cert\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.666734 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znkk5\" (UniqueName: \"kubernetes.io/projected/52f05ad6-cb54-460f-b6b6-36b41e75fa85-kube-api-access-znkk5\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.666755 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abf813d9-b35d-46e8-bd23-9fe112c2370e-config\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.666779 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/abf813d9-b35d-46e8-bd23-9fe112c2370e-client-ca\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.666828 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/abf813d9-b35d-46e8-bd23-9fe112c2370e-proxy-ca-bundles\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.666888 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-config\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.667268 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/52f05ad6-cb54-460f-b6b6-36b41e75fa85-serving-cert\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.667309 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-client-ca\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.667336 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmxkx\" (UniqueName: \"kubernetes.io/projected/abf813d9-b35d-46e8-bd23-9fe112c2370e-kube-api-access-wmxkx\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.668167 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-client-ca\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.668890 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-config\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.670096 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/abf813d9-b35d-46e8-bd23-9fe112c2370e-client-ca\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.670813 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/abf813d9-b35d-46e8-bd23-9fe112c2370e-config\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.670982 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/abf813d9-b35d-46e8-bd23-9fe112c2370e-proxy-ca-bundles\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.676603 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52f05ad6-cb54-460f-b6b6-36b41e75fa85-serving-cert\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " 
pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.678074 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/abf813d9-b35d-46e8-bd23-9fe112c2370e-serving-cert\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.684749 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znkk5\" (UniqueName: \"kubernetes.io/projected/52f05ad6-cb54-460f-b6b6-36b41e75fa85-kube-api-access-znkk5\") pod \"route-controller-manager-54d8bb9649-g9glq\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.691116 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmxkx\" (UniqueName: \"kubernetes.io/projected/abf813d9-b35d-46e8-bd23-9fe112c2370e-kube-api-access-wmxkx\") pod \"controller-manager-5f5cfcb84f-7gm9w\" (UID: \"abf813d9-b35d-46e8-bd23-9fe112c2370e\") " pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.770663 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:00 crc kubenswrapper[4669]: I1210 15:26:00.836159 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.070017 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w"] Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.143809 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq"] Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.418429 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" event={"ID":"52f05ad6-cb54-460f-b6b6-36b41e75fa85","Type":"ContainerStarted","Data":"75a3c98fa42a075b06b36a9a168e0db235282a82497dadd1ad5e456f9a025b46"} Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.418863 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" event={"ID":"52f05ad6-cb54-460f-b6b6-36b41e75fa85","Type":"ContainerStarted","Data":"fe3a0ec0c26bc2c68ba32ea5fa9431fe71b4dd032f6bc5ecd3943c8a13c13f64"} Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.418888 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.420193 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" event={"ID":"abf813d9-b35d-46e8-bd23-9fe112c2370e","Type":"ContainerStarted","Data":"020a5bcb7b571e2d71639d2a62a17951017a44bf4521bc2577f5b60403fbf1b7"} Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.420366 4669 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" event={"ID":"abf813d9-b35d-46e8-bd23-9fe112c2370e","Type":"ContainerStarted","Data":"463e8519f0ffe0dbe0363f7b80ffa7152a011a6318f763554bd2d69acf2a5215"} Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.420715 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.425887 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.446973 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" podStartSLOduration=3.446955575 podStartE2EDuration="3.446955575s" podCreationTimestamp="2025-12-10 15:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:26:01.444040989 +0000 UTC m=+335.360987626" watchObservedRunningTime="2025-12-10 15:26:01.446955575 +0000 UTC m=+335.363902202" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.463750 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5f5cfcb84f-7gm9w" podStartSLOduration=3.463723559 podStartE2EDuration="3.463723559s" podCreationTimestamp="2025-12-10 15:25:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:26:01.461305197 +0000 UTC m=+335.378251834" watchObservedRunningTime="2025-12-10 15:26:01.463723559 +0000 UTC m=+335.380670186" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.780102 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.859933 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-zgjrg"] Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.861152 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.863285 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.875139 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zgjrg"] Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.887500 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81135d4d-3c9e-4d4e-8f6e-876541123d69-utilities\") pod \"redhat-marketplace-zgjrg\" (UID: \"81135d4d-3c9e-4d4e-8f6e-876541123d69\") " pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.887578 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55vs9\" (UniqueName: \"kubernetes.io/projected/81135d4d-3c9e-4d4e-8f6e-876541123d69-kube-api-access-55vs9\") pod \"redhat-marketplace-zgjrg\" (UID: \"81135d4d-3c9e-4d4e-8f6e-876541123d69\") " pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.887600 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81135d4d-3c9e-4d4e-8f6e-876541123d69-catalog-content\") pod \"redhat-marketplace-zgjrg\" (UID: \"81135d4d-3c9e-4d4e-8f6e-876541123d69\") " pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.988392 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55vs9\" (UniqueName: \"kubernetes.io/projected/81135d4d-3c9e-4d4e-8f6e-876541123d69-kube-api-access-55vs9\") pod \"redhat-marketplace-zgjrg\" (UID: \"81135d4d-3c9e-4d4e-8f6e-876541123d69\") " pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.988452 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81135d4d-3c9e-4d4e-8f6e-876541123d69-catalog-content\") pod \"redhat-marketplace-zgjrg\" (UID: \"81135d4d-3c9e-4d4e-8f6e-876541123d69\") " pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.988516 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81135d4d-3c9e-4d4e-8f6e-876541123d69-utilities\") pod \"redhat-marketplace-zgjrg\" (UID: \"81135d4d-3c9e-4d4e-8f6e-876541123d69\") " pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.989007 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81135d4d-3c9e-4d4e-8f6e-876541123d69-utilities\") pod \"redhat-marketplace-zgjrg\" (UID: \"81135d4d-3c9e-4d4e-8f6e-876541123d69\") " pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:01 crc kubenswrapper[4669]: I1210 15:26:01.989114 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81135d4d-3c9e-4d4e-8f6e-876541123d69-catalog-content\") pod \"redhat-marketplace-zgjrg\" (UID: 
\"81135d4d-3c9e-4d4e-8f6e-876541123d69\") " pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.019631 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55vs9\" (UniqueName: \"kubernetes.io/projected/81135d4d-3c9e-4d4e-8f6e-876541123d69-kube-api-access-55vs9\") pod \"redhat-marketplace-zgjrg\" (UID: \"81135d4d-3c9e-4d4e-8f6e-876541123d69\") " pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.179268 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.441101 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-xtmp8"] Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.442924 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.444588 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.457055 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xtmp8"] Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.496055 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-utilities\") pod \"redhat-operators-xtmp8\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") " pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.498666 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-catalog-content\") pod \"redhat-operators-xtmp8\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") " pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.498796 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg7kt\" (UniqueName: \"kubernetes.io/projected/35fc6428-e432-43a3-9ba3-09e64c8c3c65-kube-api-access-pg7kt\") pod \"redhat-operators-xtmp8\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") " pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.599542 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-catalog-content\") pod \"redhat-operators-xtmp8\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") " pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.599596 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg7kt\" (UniqueName: \"kubernetes.io/projected/35fc6428-e432-43a3-9ba3-09e64c8c3c65-kube-api-access-pg7kt\") pod \"redhat-operators-xtmp8\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") " pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.599623 4669 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-utilities\") pod \"redhat-operators-xtmp8\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") " pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.600081 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-utilities\") pod \"redhat-operators-xtmp8\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") " pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.600319 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-catalog-content\") pod \"redhat-operators-xtmp8\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") " pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.619951 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg7kt\" (UniqueName: \"kubernetes.io/projected/35fc6428-e432-43a3-9ba3-09e64c8c3c65-kube-api-access-pg7kt\") pod \"redhat-operators-xtmp8\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") " pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.726159 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-zgjrg"] Dec 10 15:26:02 crc kubenswrapper[4669]: W1210 15:26:02.735557 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81135d4d_3c9e_4d4e_8f6e_876541123d69.slice/crio-a5d5e2f928c3eed67bae25f935e88373fee19555e8c9b578595f7edfd32cbcf0 WatchSource:0}: Error finding container a5d5e2f928c3eed67bae25f935e88373fee19555e8c9b578595f7edfd32cbcf0: Status 404 returned error can't find the container with id a5d5e2f928c3eed67bae25f935e88373fee19555e8c9b578595f7edfd32cbcf0 Dec 10 15:26:02 crc kubenswrapper[4669]: I1210 15:26:02.769099 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:03 crc kubenswrapper[4669]: I1210 15:26:03.064564 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-xtmp8"] Dec 10 15:26:03 crc kubenswrapper[4669]: I1210 15:26:03.441617 4669 generic.go:334] "Generic (PLEG): container finished" podID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerID="cf553c53cb60b2de2244efdbb2c0a288f1018f5525242b2d509e07ef1a1fa099" exitCode=0 Dec 10 15:26:03 crc kubenswrapper[4669]: I1210 15:26:03.441731 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtmp8" event={"ID":"35fc6428-e432-43a3-9ba3-09e64c8c3c65","Type":"ContainerDied","Data":"cf553c53cb60b2de2244efdbb2c0a288f1018f5525242b2d509e07ef1a1fa099"} Dec 10 15:26:03 crc kubenswrapper[4669]: I1210 15:26:03.441778 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtmp8" event={"ID":"35fc6428-e432-43a3-9ba3-09e64c8c3c65","Type":"ContainerStarted","Data":"44a311a72324fe1cb67d9fc8e69cddf5f07546382de26f065308c7eab8819997"} Dec 10 15:26:03 crc kubenswrapper[4669]: I1210 15:26:03.458198 4669 generic.go:334] "Generic (PLEG): container finished" podID="81135d4d-3c9e-4d4e-8f6e-876541123d69" containerID="843514a2745ccd6e484eea604c2d0b4d983a7498d8d7fa93749f803ce202c652" exitCode=0 Dec 10 15:26:03 crc kubenswrapper[4669]: I1210 15:26:03.460840 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zgjrg" event={"ID":"81135d4d-3c9e-4d4e-8f6e-876541123d69","Type":"ContainerDied","Data":"843514a2745ccd6e484eea604c2d0b4d983a7498d8d7fa93749f803ce202c652"} Dec 10 15:26:03 crc kubenswrapper[4669]: I1210 15:26:03.461126 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zgjrg" event={"ID":"81135d4d-3c9e-4d4e-8f6e-876541123d69","Type":"ContainerStarted","Data":"a5d5e2f928c3eed67bae25f935e88373fee19555e8c9b578595f7edfd32cbcf0"} Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.242098 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gknnp"] Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.243383 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.247570 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.278455 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gknnp"] Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.367998 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kttrp\" (UniqueName: \"kubernetes.io/projected/ebd3f28a-b015-4c75-9b7c-2bab7c397648-kube-api-access-kttrp\") pod \"community-operators-gknnp\" (UID: \"ebd3f28a-b015-4c75-9b7c-2bab7c397648\") " pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.368100 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebd3f28a-b015-4c75-9b7c-2bab7c397648-catalog-content\") pod \"community-operators-gknnp\" (UID: \"ebd3f28a-b015-4c75-9b7c-2bab7c397648\") " pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.368130 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebd3f28a-b015-4c75-9b7c-2bab7c397648-utilities\") pod \"community-operators-gknnp\" (UID: \"ebd3f28a-b015-4c75-9b7c-2bab7c397648\") " pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.470100 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kttrp\" (UniqueName: \"kubernetes.io/projected/ebd3f28a-b015-4c75-9b7c-2bab7c397648-kube-api-access-kttrp\") pod \"community-operators-gknnp\" (UID: \"ebd3f28a-b015-4c75-9b7c-2bab7c397648\") " pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.470308 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebd3f28a-b015-4c75-9b7c-2bab7c397648-catalog-content\") pod \"community-operators-gknnp\" (UID: \"ebd3f28a-b015-4c75-9b7c-2bab7c397648\") " pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.470373 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebd3f28a-b015-4c75-9b7c-2bab7c397648-utilities\") pod \"community-operators-gknnp\" (UID: \"ebd3f28a-b015-4c75-9b7c-2bab7c397648\") " pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.471037 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebd3f28a-b015-4c75-9b7c-2bab7c397648-catalog-content\") pod \"community-operators-gknnp\" (UID: \"ebd3f28a-b015-4c75-9b7c-2bab7c397648\") " pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.471468 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebd3f28a-b015-4c75-9b7c-2bab7c397648-utilities\") pod \"community-operators-gknnp\" (UID: 
\"ebd3f28a-b015-4c75-9b7c-2bab7c397648\") " pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.502007 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kttrp\" (UniqueName: \"kubernetes.io/projected/ebd3f28a-b015-4c75-9b7c-2bab7c397648-kube-api-access-kttrp\") pod \"community-operators-gknnp\" (UID: \"ebd3f28a-b015-4c75-9b7c-2bab7c397648\") " pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.563914 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.836237 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jmngh"] Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.837413 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.842406 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.894546 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jmngh"] Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.979285 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a969328-820f-4d0e-9618-f40a0d6fd480-catalog-content\") pod \"certified-operators-jmngh\" (UID: \"5a969328-820f-4d0e-9618-f40a0d6fd480\") " pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.979369 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a969328-820f-4d0e-9618-f40a0d6fd480-utilities\") pod \"certified-operators-jmngh\" (UID: \"5a969328-820f-4d0e-9618-f40a0d6fd480\") " pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:04 crc kubenswrapper[4669]: I1210 15:26:04.979458 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n6f9\" (UniqueName: \"kubernetes.io/projected/5a969328-820f-4d0e-9618-f40a0d6fd480-kube-api-access-5n6f9\") pod \"certified-operators-jmngh\" (UID: \"5a969328-820f-4d0e-9618-f40a0d6fd480\") " pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.023921 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gknnp"] Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.080985 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n6f9\" (UniqueName: \"kubernetes.io/projected/5a969328-820f-4d0e-9618-f40a0d6fd480-kube-api-access-5n6f9\") pod \"certified-operators-jmngh\" (UID: \"5a969328-820f-4d0e-9618-f40a0d6fd480\") " pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.081109 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a969328-820f-4d0e-9618-f40a0d6fd480-catalog-content\") pod \"certified-operators-jmngh\" (UID: 
\"5a969328-820f-4d0e-9618-f40a0d6fd480\") " pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.081149 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a969328-820f-4d0e-9618-f40a0d6fd480-utilities\") pod \"certified-operators-jmngh\" (UID: \"5a969328-820f-4d0e-9618-f40a0d6fd480\") " pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.081646 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a969328-820f-4d0e-9618-f40a0d6fd480-utilities\") pod \"certified-operators-jmngh\" (UID: \"5a969328-820f-4d0e-9618-f40a0d6fd480\") " pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.081913 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a969328-820f-4d0e-9618-f40a0d6fd480-catalog-content\") pod \"certified-operators-jmngh\" (UID: \"5a969328-820f-4d0e-9618-f40a0d6fd480\") " pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.107290 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n6f9\" (UniqueName: \"kubernetes.io/projected/5a969328-820f-4d0e-9618-f40a0d6fd480-kube-api-access-5n6f9\") pod \"certified-operators-jmngh\" (UID: \"5a969328-820f-4d0e-9618-f40a0d6fd480\") " pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.156165 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.440586 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jmngh"] Dec 10 15:26:05 crc kubenswrapper[4669]: W1210 15:26:05.444600 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5a969328_820f_4d0e_9618_f40a0d6fd480.slice/crio-609b5837c987dea1b682cf643bb68ea8e69625a9d082e0ca3e9e5270d51d18e7 WatchSource:0}: Error finding container 609b5837c987dea1b682cf643bb68ea8e69625a9d082e0ca3e9e5270d51d18e7: Status 404 returned error can't find the container with id 609b5837c987dea1b682cf643bb68ea8e69625a9d082e0ca3e9e5270d51d18e7 Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.471438 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtmp8" event={"ID":"35fc6428-e432-43a3-9ba3-09e64c8c3c65","Type":"ContainerStarted","Data":"110d01addc3a853b584f6557a186ddb2bd47464f301393b3a685056dbe0fdbbb"} Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.477681 4669 generic.go:334] "Generic (PLEG): container finished" podID="ebd3f28a-b015-4c75-9b7c-2bab7c397648" containerID="9d753f67be6597fd5be9fe6c6877dad392244fdf9dd155d60ed4af729a8c15d7" exitCode=0 Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.477816 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gknnp" event={"ID":"ebd3f28a-b015-4c75-9b7c-2bab7c397648","Type":"ContainerDied","Data":"9d753f67be6597fd5be9fe6c6877dad392244fdf9dd155d60ed4af729a8c15d7"} Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.477841 4669 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/community-operators-gknnp" event={"ID":"ebd3f28a-b015-4c75-9b7c-2bab7c397648","Type":"ContainerStarted","Data":"3bb97f0120e62f995f360dd8af95699576502993805e09cbbf718d34d7119284"} Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.500139 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zgjrg" event={"ID":"81135d4d-3c9e-4d4e-8f6e-876541123d69","Type":"ContainerStarted","Data":"ca5578d8866689dd2cf0bddd3bec84590a3565e4ffffd2d85c5ae1a79c779f02"} Dec 10 15:26:05 crc kubenswrapper[4669]: I1210 15:26:05.501565 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jmngh" event={"ID":"5a969328-820f-4d0e-9618-f40a0d6fd480","Type":"ContainerStarted","Data":"609b5837c987dea1b682cf643bb68ea8e69625a9d082e0ca3e9e5270d51d18e7"} Dec 10 15:26:06 crc kubenswrapper[4669]: I1210 15:26:06.509059 4669 generic.go:334] "Generic (PLEG): container finished" podID="81135d4d-3c9e-4d4e-8f6e-876541123d69" containerID="ca5578d8866689dd2cf0bddd3bec84590a3565e4ffffd2d85c5ae1a79c779f02" exitCode=0 Dec 10 15:26:06 crc kubenswrapper[4669]: I1210 15:26:06.509480 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zgjrg" event={"ID":"81135d4d-3c9e-4d4e-8f6e-876541123d69","Type":"ContainerDied","Data":"ca5578d8866689dd2cf0bddd3bec84590a3565e4ffffd2d85c5ae1a79c779f02"} Dec 10 15:26:06 crc kubenswrapper[4669]: I1210 15:26:06.514281 4669 generic.go:334] "Generic (PLEG): container finished" podID="5a969328-820f-4d0e-9618-f40a0d6fd480" containerID="60acc07b7c3aba4e5f052ca0ab7b6163823546e47bfdafc4e721ac918f70f933" exitCode=0 Dec 10 15:26:06 crc kubenswrapper[4669]: I1210 15:26:06.514424 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jmngh" event={"ID":"5a969328-820f-4d0e-9618-f40a0d6fd480","Type":"ContainerDied","Data":"60acc07b7c3aba4e5f052ca0ab7b6163823546e47bfdafc4e721ac918f70f933"} Dec 10 15:26:06 crc kubenswrapper[4669]: I1210 15:26:06.523153 4669 generic.go:334] "Generic (PLEG): container finished" podID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerID="110d01addc3a853b584f6557a186ddb2bd47464f301393b3a685056dbe0fdbbb" exitCode=0 Dec 10 15:26:06 crc kubenswrapper[4669]: I1210 15:26:06.523414 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtmp8" event={"ID":"35fc6428-e432-43a3-9ba3-09e64c8c3c65","Type":"ContainerDied","Data":"110d01addc3a853b584f6557a186ddb2bd47464f301393b3a685056dbe0fdbbb"} Dec 10 15:26:07 crc kubenswrapper[4669]: I1210 15:26:07.531146 4669 generic.go:334] "Generic (PLEG): container finished" podID="ebd3f28a-b015-4c75-9b7c-2bab7c397648" containerID="2e958a96afb0ed2ed75e8d7deb5309ada0e35de9194c67b75d80f55213961e1c" exitCode=0 Dec 10 15:26:07 crc kubenswrapper[4669]: I1210 15:26:07.531258 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gknnp" event={"ID":"ebd3f28a-b015-4c75-9b7c-2bab7c397648","Type":"ContainerDied","Data":"2e958a96afb0ed2ed75e8d7deb5309ada0e35de9194c67b75d80f55213961e1c"} Dec 10 15:26:07 crc kubenswrapper[4669]: I1210 15:26:07.534441 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-zgjrg" event={"ID":"81135d4d-3c9e-4d4e-8f6e-876541123d69","Type":"ContainerStarted","Data":"b6aa53695a7a0e48c1af4c2b5143cc854b82de586667090123033becc570039e"} Dec 10 15:26:07 crc kubenswrapper[4669]: I1210 
15:26:07.536466 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jmngh" event={"ID":"5a969328-820f-4d0e-9618-f40a0d6fd480","Type":"ContainerStarted","Data":"08cad3dd3ce1bb2bcd6827f3bd143935edacefb7a66a0477243fd2789b8a65f8"} Dec 10 15:26:07 crc kubenswrapper[4669]: I1210 15:26:07.597987 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-zgjrg" podStartSLOduration=3.034982609 podStartE2EDuration="6.597968272s" podCreationTimestamp="2025-12-10 15:26:01 +0000 UTC" firstStartedPulling="2025-12-10 15:26:03.464921252 +0000 UTC m=+337.381867889" lastFinishedPulling="2025-12-10 15:26:07.027906925 +0000 UTC m=+340.944853552" observedRunningTime="2025-12-10 15:26:07.5893889 +0000 UTC m=+341.506335527" watchObservedRunningTime="2025-12-10 15:26:07.597968272 +0000 UTC m=+341.514914899" Dec 10 15:26:08 crc kubenswrapper[4669]: I1210 15:26:08.542777 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtmp8" event={"ID":"35fc6428-e432-43a3-9ba3-09e64c8c3c65","Type":"ContainerStarted","Data":"d0666cf720f057418a521dee51d0427df03f5938b4ee4e7fa33bf0f8555fb69b"} Dec 10 15:26:08 crc kubenswrapper[4669]: I1210 15:26:08.546360 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gknnp" event={"ID":"ebd3f28a-b015-4c75-9b7c-2bab7c397648","Type":"ContainerStarted","Data":"fe128a69f79b4a7bd2970ccff88da9931c987c049f3ebb10234231b0978762e9"} Dec 10 15:26:08 crc kubenswrapper[4669]: I1210 15:26:08.547812 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jmngh" event={"ID":"5a969328-820f-4d0e-9618-f40a0d6fd480","Type":"ContainerDied","Data":"08cad3dd3ce1bb2bcd6827f3bd143935edacefb7a66a0477243fd2789b8a65f8"} Dec 10 15:26:08 crc kubenswrapper[4669]: I1210 15:26:08.547857 4669 generic.go:334] "Generic (PLEG): container finished" podID="5a969328-820f-4d0e-9618-f40a0d6fd480" containerID="08cad3dd3ce1bb2bcd6827f3bd143935edacefb7a66a0477243fd2789b8a65f8" exitCode=0 Dec 10 15:26:08 crc kubenswrapper[4669]: I1210 15:26:08.598729 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-xtmp8" podStartSLOduration=2.340559486 podStartE2EDuration="6.598707508s" podCreationTimestamp="2025-12-10 15:26:02 +0000 UTC" firstStartedPulling="2025-12-10 15:26:03.448398134 +0000 UTC m=+337.365344771" lastFinishedPulling="2025-12-10 15:26:07.706546166 +0000 UTC m=+341.623492793" observedRunningTime="2025-12-10 15:26:08.575336892 +0000 UTC m=+342.492283519" watchObservedRunningTime="2025-12-10 15:26:08.598707508 +0000 UTC m=+342.515654135" Dec 10 15:26:08 crc kubenswrapper[4669]: I1210 15:26:08.623510 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gknnp" podStartSLOduration=2.131622856 podStartE2EDuration="4.62349062s" podCreationTimestamp="2025-12-10 15:26:04 +0000 UTC" firstStartedPulling="2025-12-10 15:26:05.479000509 +0000 UTC m=+339.395947136" lastFinishedPulling="2025-12-10 15:26:07.970868273 +0000 UTC m=+341.887814900" observedRunningTime="2025-12-10 15:26:08.619748823 +0000 UTC m=+342.536695450" watchObservedRunningTime="2025-12-10 15:26:08.62349062 +0000 UTC m=+342.540437247" Dec 10 15:26:09 crc kubenswrapper[4669]: I1210 15:26:09.557963 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-jmngh" event={"ID":"5a969328-820f-4d0e-9618-f40a0d6fd480","Type":"ContainerStarted","Data":"e84c6f32677b507c3c901168abdb9de6cd8fb0865192e316bb83d64690542019"} Dec 10 15:26:09 crc kubenswrapper[4669]: I1210 15:26:09.579676 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jmngh" podStartSLOduration=2.836069236 podStartE2EDuration="5.57965008s" podCreationTimestamp="2025-12-10 15:26:04 +0000 UTC" firstStartedPulling="2025-12-10 15:26:06.516116837 +0000 UTC m=+340.433063464" lastFinishedPulling="2025-12-10 15:26:09.259697681 +0000 UTC m=+343.176644308" observedRunningTime="2025-12-10 15:26:09.575929684 +0000 UTC m=+343.492876321" watchObservedRunningTime="2025-12-10 15:26:09.57965008 +0000 UTC m=+343.496596697" Dec 10 15:26:12 crc kubenswrapper[4669]: I1210 15:26:12.179647 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:12 crc kubenswrapper[4669]: I1210 15:26:12.179979 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:12 crc kubenswrapper[4669]: I1210 15:26:12.234100 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:12 crc kubenswrapper[4669]: I1210 15:26:12.633626 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-zgjrg" Dec 10 15:26:12 crc kubenswrapper[4669]: I1210 15:26:12.770228 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:12 crc kubenswrapper[4669]: I1210 15:26:12.770500 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:13 crc kubenswrapper[4669]: I1210 15:26:13.840020 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-xtmp8" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerName="registry-server" probeResult="failure" output=< Dec 10 15:26:13 crc kubenswrapper[4669]: timeout: failed to connect service ":50051" within 1s Dec 10 15:26:13 crc kubenswrapper[4669]: > Dec 10 15:26:14 crc kubenswrapper[4669]: I1210 15:26:14.564876 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:14 crc kubenswrapper[4669]: I1210 15:26:14.564951 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:14 crc kubenswrapper[4669]: I1210 15:26:14.643801 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:14 crc kubenswrapper[4669]: I1210 15:26:14.709338 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gknnp" Dec 10 15:26:15 crc kubenswrapper[4669]: I1210 15:26:15.157084 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:15 crc kubenswrapper[4669]: I1210 15:26:15.157149 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:15 crc 
kubenswrapper[4669]: I1210 15:26:15.216357 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:15 crc kubenswrapper[4669]: I1210 15:26:15.678312 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jmngh" Dec 10 15:26:18 crc kubenswrapper[4669]: I1210 15:26:18.383617 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq"] Dec 10 15:26:18 crc kubenswrapper[4669]: I1210 15:26:18.385047 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" podUID="52f05ad6-cb54-460f-b6b6-36b41e75fa85" containerName="route-controller-manager" containerID="cri-o://75a3c98fa42a075b06b36a9a168e0db235282a82497dadd1ad5e456f9a025b46" gracePeriod=30 Dec 10 15:26:19 crc kubenswrapper[4669]: I1210 15:26:19.210495 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-rv774" Dec 10 15:26:19 crc kubenswrapper[4669]: I1210 15:26:19.314344 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n4dsm"] Dec 10 15:26:20 crc kubenswrapper[4669]: I1210 15:26:20.837642 4669 patch_prober.go:28] interesting pod/route-controller-manager-54d8bb9649-g9glq container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body= Dec 10 15:26:20 crc kubenswrapper[4669]: I1210 15:26:20.837763 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" podUID="52f05ad6-cb54-460f-b6b6-36b41e75fa85" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" Dec 10 15:26:21 crc kubenswrapper[4669]: I1210 15:26:21.649949 4669 generic.go:334] "Generic (PLEG): container finished" podID="52f05ad6-cb54-460f-b6b6-36b41e75fa85" containerID="75a3c98fa42a075b06b36a9a168e0db235282a82497dadd1ad5e456f9a025b46" exitCode=0 Dec 10 15:26:21 crc kubenswrapper[4669]: I1210 15:26:21.650001 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" event={"ID":"52f05ad6-cb54-460f-b6b6-36b41e75fa85","Type":"ContainerDied","Data":"75a3c98fa42a075b06b36a9a168e0db235282a82497dadd1ad5e456f9a025b46"} Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.597898 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.627637 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx"] Dec 10 15:26:22 crc kubenswrapper[4669]: E1210 15:26:22.627915 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52f05ad6-cb54-460f-b6b6-36b41e75fa85" containerName="route-controller-manager" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.627936 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="52f05ad6-cb54-460f-b6b6-36b41e75fa85" containerName="route-controller-manager" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.628070 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="52f05ad6-cb54-460f-b6b6-36b41e75fa85" containerName="route-controller-manager" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.628578 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.642530 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52f05ad6-cb54-460f-b6b6-36b41e75fa85-serving-cert\") pod \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.642594 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-config\") pod \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.642636 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-client-ca\") pod \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.642664 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znkk5\" (UniqueName: \"kubernetes.io/projected/52f05ad6-cb54-460f-b6b6-36b41e75fa85-kube-api-access-znkk5\") pod \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\" (UID: \"52f05ad6-cb54-460f-b6b6-36b41e75fa85\") " Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.643644 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-client-ca" (OuterVolumeSpecName: "client-ca") pod "52f05ad6-cb54-460f-b6b6-36b41e75fa85" (UID: "52f05ad6-cb54-460f-b6b6-36b41e75fa85"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.644008 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-config" (OuterVolumeSpecName: "config") pod "52f05ad6-cb54-460f-b6b6-36b41e75fa85" (UID: "52f05ad6-cb54-460f-b6b6-36b41e75fa85"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.650558 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52f05ad6-cb54-460f-b6b6-36b41e75fa85-kube-api-access-znkk5" (OuterVolumeSpecName: "kube-api-access-znkk5") pod "52f05ad6-cb54-460f-b6b6-36b41e75fa85" (UID: "52f05ad6-cb54-460f-b6b6-36b41e75fa85"). InnerVolumeSpecName "kube-api-access-znkk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.650660 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52f05ad6-cb54-460f-b6b6-36b41e75fa85-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "52f05ad6-cb54-460f-b6b6-36b41e75fa85" (UID: "52f05ad6-cb54-460f-b6b6-36b41e75fa85"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.653726 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx"] Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.655262 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" event={"ID":"52f05ad6-cb54-460f-b6b6-36b41e75fa85","Type":"ContainerDied","Data":"fe3a0ec0c26bc2c68ba32ea5fa9431fe71b4dd032f6bc5ecd3943c8a13c13f64"} Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.655302 4669 scope.go:117] "RemoveContainer" containerID="75a3c98fa42a075b06b36a9a168e0db235282a82497dadd1ad5e456f9a025b46" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.655452 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.710487 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq"] Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.713505 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54d8bb9649-g9glq"] Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.744240 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hjv2\" (UniqueName: \"kubernetes.io/projected/0405df14-26b5-4049-a027-272515b41dcb-kube-api-access-5hjv2\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.744341 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0405df14-26b5-4049-a027-272515b41dcb-config\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.744529 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0405df14-26b5-4049-a027-272515b41dcb-serving-cert\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.744720 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0405df14-26b5-4049-a027-272515b41dcb-client-ca\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.744830 4669 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/52f05ad6-cb54-460f-b6b6-36b41e75fa85-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.744857 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.744870 4669 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/52f05ad6-cb54-460f-b6b6-36b41e75fa85-client-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.744884 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znkk5\" (UniqueName: \"kubernetes.io/projected/52f05ad6-cb54-460f-b6b6-36b41e75fa85-kube-api-access-znkk5\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.810758 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.845498 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0405df14-26b5-4049-a027-272515b41dcb-serving-cert\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.845582 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0405df14-26b5-4049-a027-272515b41dcb-client-ca\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.845608 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hjv2\" (UniqueName: \"kubernetes.io/projected/0405df14-26b5-4049-a027-272515b41dcb-kube-api-access-5hjv2\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.845657 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0405df14-26b5-4049-a027-272515b41dcb-config\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.846799 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0405df14-26b5-4049-a027-272515b41dcb-client-ca\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.846887 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0405df14-26b5-4049-a027-272515b41dcb-config\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.850774 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0405df14-26b5-4049-a027-272515b41dcb-serving-cert\") pod \"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.869813 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-xtmp8" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.871156 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hjv2\" (UniqueName: \"kubernetes.io/projected/0405df14-26b5-4049-a027-272515b41dcb-kube-api-access-5hjv2\") pod 
\"route-controller-manager-7b5dd74956-5tbdx\" (UID: \"0405df14-26b5-4049-a027-272515b41dcb\") " pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:22 crc kubenswrapper[4669]: I1210 15:26:22.944753 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:23 crc kubenswrapper[4669]: I1210 15:26:23.332512 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx"] Dec 10 15:26:23 crc kubenswrapper[4669]: W1210 15:26:23.341914 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0405df14_26b5_4049_a027_272515b41dcb.slice/crio-feb13465ba53282113a52961063a28e4cee384461d8bf21b9355e7017b17344e WatchSource:0}: Error finding container feb13465ba53282113a52961063a28e4cee384461d8bf21b9355e7017b17344e: Status 404 returned error can't find the container with id feb13465ba53282113a52961063a28e4cee384461d8bf21b9355e7017b17344e Dec 10 15:26:23 crc kubenswrapper[4669]: I1210 15:26:23.663513 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" event={"ID":"0405df14-26b5-4049-a027-272515b41dcb","Type":"ContainerStarted","Data":"aa5ec68b92f60cc9996de9a377f2b3623dfa21b1be12f11e5cb41bf84889f480"} Dec 10 15:26:23 crc kubenswrapper[4669]: I1210 15:26:23.663837 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:23 crc kubenswrapper[4669]: I1210 15:26:23.663849 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" event={"ID":"0405df14-26b5-4049-a027-272515b41dcb","Type":"ContainerStarted","Data":"feb13465ba53282113a52961063a28e4cee384461d8bf21b9355e7017b17344e"} Dec 10 15:26:23 crc kubenswrapper[4669]: I1210 15:26:23.678017 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" podStartSLOduration=5.677998783 podStartE2EDuration="5.677998783s" podCreationTimestamp="2025-12-10 15:26:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:26:23.676533905 +0000 UTC m=+357.593480532" watchObservedRunningTime="2025-12-10 15:26:23.677998783 +0000 UTC m=+357.594945410" Dec 10 15:26:24 crc kubenswrapper[4669]: I1210 15:26:24.055824 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7b5dd74956-5tbdx" Dec 10 15:26:24 crc kubenswrapper[4669]: I1210 15:26:24.405410 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52f05ad6-cb54-460f-b6b6-36b41e75fa85" path="/var/lib/kubelet/pods/52f05ad6-cb54-460f-b6b6-36b41e75fa85/volumes" Dec 10 15:26:28 crc kubenswrapper[4669]: I1210 15:26:28.744841 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:26:28 crc kubenswrapper[4669]: I1210 
15:26:28.745547 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.453917 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" podUID="20977ed7-6db6-43e9-95a6-95280e2d8814" containerName="registry" containerID="cri-o://ddab4cf4bf2128ae7eeb6ccda515bf0d7c2cff1a9c8e3f05fbfdae040744ce9f" gracePeriod=30 Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.814492 4669 generic.go:334] "Generic (PLEG): container finished" podID="20977ed7-6db6-43e9-95a6-95280e2d8814" containerID="ddab4cf4bf2128ae7eeb6ccda515bf0d7c2cff1a9c8e3f05fbfdae040744ce9f" exitCode=0 Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.814554 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" event={"ID":"20977ed7-6db6-43e9-95a6-95280e2d8814","Type":"ContainerDied","Data":"ddab4cf4bf2128ae7eeb6ccda515bf0d7c2cff1a9c8e3f05fbfdae040744ce9f"} Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.875482 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.964552 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20977ed7-6db6-43e9-95a6-95280e2d8814-ca-trust-extracted\") pod \"20977ed7-6db6-43e9-95a6-95280e2d8814\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.964645 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-certificates\") pod \"20977ed7-6db6-43e9-95a6-95280e2d8814\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.964698 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cm4qr\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-kube-api-access-cm4qr\") pod \"20977ed7-6db6-43e9-95a6-95280e2d8814\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.964736 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-bound-sa-token\") pod \"20977ed7-6db6-43e9-95a6-95280e2d8814\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.964788 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-trusted-ca\") pod \"20977ed7-6db6-43e9-95a6-95280e2d8814\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.965129 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"20977ed7-6db6-43e9-95a6-95280e2d8814\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.965209 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20977ed7-6db6-43e9-95a6-95280e2d8814-installation-pull-secrets\") pod \"20977ed7-6db6-43e9-95a6-95280e2d8814\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.965378 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-tls\") pod \"20977ed7-6db6-43e9-95a6-95280e2d8814\" (UID: \"20977ed7-6db6-43e9-95a6-95280e2d8814\") " Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.966772 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "20977ed7-6db6-43e9-95a6-95280e2d8814" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.967142 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "20977ed7-6db6-43e9-95a6-95280e2d8814" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.973291 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20977ed7-6db6-43e9-95a6-95280e2d8814-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "20977ed7-6db6-43e9-95a6-95280e2d8814" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.975044 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "20977ed7-6db6-43e9-95a6-95280e2d8814" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.975408 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-kube-api-access-cm4qr" (OuterVolumeSpecName: "kube-api-access-cm4qr") pod "20977ed7-6db6-43e9-95a6-95280e2d8814" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814"). InnerVolumeSpecName "kube-api-access-cm4qr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.975956 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "20977ed7-6db6-43e9-95a6-95280e2d8814" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814"). 
InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.977601 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "20977ed7-6db6-43e9-95a6-95280e2d8814" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 10 15:26:44 crc kubenswrapper[4669]: I1210 15:26:44.991544 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/20977ed7-6db6-43e9-95a6-95280e2d8814-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "20977ed7-6db6-43e9-95a6-95280e2d8814" (UID: "20977ed7-6db6-43e9-95a6-95280e2d8814"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.067851 4669 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.068103 4669 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.068239 4669 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/20977ed7-6db6-43e9-95a6-95280e2d8814-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.068339 4669 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.068430 4669 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/20977ed7-6db6-43e9-95a6-95280e2d8814-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.068509 4669 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/20977ed7-6db6-43e9-95a6-95280e2d8814-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.068588 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cm4qr\" (UniqueName: \"kubernetes.io/projected/20977ed7-6db6-43e9-95a6-95280e2d8814-kube-api-access-cm4qr\") on node \"crc\" DevicePath \"\"" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.822682 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" event={"ID":"20977ed7-6db6-43e9-95a6-95280e2d8814","Type":"ContainerDied","Data":"89f6c1e88a2088b55eaef38ca9a533acaf1914b7d673304ae158adcaf51ce8db"} Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.822748 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-n4dsm" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.822775 4669 scope.go:117] "RemoveContainer" containerID="ddab4cf4bf2128ae7eeb6ccda515bf0d7c2cff1a9c8e3f05fbfdae040744ce9f" Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.868316 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n4dsm"] Dec 10 15:26:45 crc kubenswrapper[4669]: I1210 15:26:45.874890 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-n4dsm"] Dec 10 15:26:46 crc kubenswrapper[4669]: I1210 15:26:46.406462 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20977ed7-6db6-43e9-95a6-95280e2d8814" path="/var/lib/kubelet/pods/20977ed7-6db6-43e9-95a6-95280e2d8814/volumes" Dec 10 15:26:58 crc kubenswrapper[4669]: I1210 15:26:58.745886 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:26:58 crc kubenswrapper[4669]: I1210 15:26:58.746502 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:27:28 crc kubenswrapper[4669]: I1210 15:27:28.745634 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:27:28 crc kubenswrapper[4669]: I1210 15:27:28.746390 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:27:28 crc kubenswrapper[4669]: I1210 15:27:28.746478 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:27:28 crc kubenswrapper[4669]: I1210 15:27:28.747460 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"818d0f66bc65130ac95f9c881dc856245c1d0a6c37a935776a040b20d9b6e12a"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:27:28 crc kubenswrapper[4669]: I1210 15:27:28.747567 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://818d0f66bc65130ac95f9c881dc856245c1d0a6c37a935776a040b20d9b6e12a" gracePeriod=600 Dec 10 15:27:29 crc kubenswrapper[4669]: I1210 15:27:29.101115 4669 generic.go:334] "Generic (PLEG): container finished" 
podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="818d0f66bc65130ac95f9c881dc856245c1d0a6c37a935776a040b20d9b6e12a" exitCode=0 Dec 10 15:27:29 crc kubenswrapper[4669]: I1210 15:27:29.101292 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"818d0f66bc65130ac95f9c881dc856245c1d0a6c37a935776a040b20d9b6e12a"} Dec 10 15:27:29 crc kubenswrapper[4669]: I1210 15:27:29.101537 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"9e8fc93e3f55db44f154c5930fd8404d5d53a93663bdc6cd89f8ba24e2f46edf"} Dec 10 15:27:29 crc kubenswrapper[4669]: I1210 15:27:29.101563 4669 scope.go:117] "RemoveContainer" containerID="64171f605f5dd1407e2ad737b9734b89d29492ff12c3b4a06aca5e1d82d0b0d9" Dec 10 15:29:58 crc kubenswrapper[4669]: I1210 15:29:58.745334 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:29:58 crc kubenswrapper[4669]: I1210 15:29:58.745956 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.192391 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm"] Dec 10 15:30:00 crc kubenswrapper[4669]: E1210 15:30:00.192637 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20977ed7-6db6-43e9-95a6-95280e2d8814" containerName="registry" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.192650 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="20977ed7-6db6-43e9-95a6-95280e2d8814" containerName="registry" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.192739 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="20977ed7-6db6-43e9-95a6-95280e2d8814" containerName="registry" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.193172 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.198242 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.198540 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.247792 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm"] Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.332153 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6xzk\" (UniqueName: \"kubernetes.io/projected/1ccde319-0785-4191-9b88-407acddb4b2f-kube-api-access-z6xzk\") pod \"collect-profiles-29423010-ncrsm\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.332204 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ccde319-0785-4191-9b88-407acddb4b2f-secret-volume\") pod \"collect-profiles-29423010-ncrsm\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.332326 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ccde319-0785-4191-9b88-407acddb4b2f-config-volume\") pod \"collect-profiles-29423010-ncrsm\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.433446 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6xzk\" (UniqueName: \"kubernetes.io/projected/1ccde319-0785-4191-9b88-407acddb4b2f-kube-api-access-z6xzk\") pod \"collect-profiles-29423010-ncrsm\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.433763 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ccde319-0785-4191-9b88-407acddb4b2f-secret-volume\") pod \"collect-profiles-29423010-ncrsm\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.433906 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ccde319-0785-4191-9b88-407acddb4b2f-config-volume\") pod \"collect-profiles-29423010-ncrsm\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.435688 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ccde319-0785-4191-9b88-407acddb4b2f-config-volume\") pod 
\"collect-profiles-29423010-ncrsm\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.446275 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ccde319-0785-4191-9b88-407acddb4b2f-secret-volume\") pod \"collect-profiles-29423010-ncrsm\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.464168 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6xzk\" (UniqueName: \"kubernetes.io/projected/1ccde319-0785-4191-9b88-407acddb4b2f-kube-api-access-z6xzk\") pod \"collect-profiles-29423010-ncrsm\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.508693 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:00 crc kubenswrapper[4669]: I1210 15:30:00.710531 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm"] Dec 10 15:30:01 crc kubenswrapper[4669]: I1210 15:30:01.482853 4669 generic.go:334] "Generic (PLEG): container finished" podID="1ccde319-0785-4191-9b88-407acddb4b2f" containerID="aa620147608fed070016ffd20f7dba4665fe9355742cbd4ce772e1c9b12ff884" exitCode=0 Dec 10 15:30:01 crc kubenswrapper[4669]: I1210 15:30:01.482896 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" event={"ID":"1ccde319-0785-4191-9b88-407acddb4b2f","Type":"ContainerDied","Data":"aa620147608fed070016ffd20f7dba4665fe9355742cbd4ce772e1c9b12ff884"} Dec 10 15:30:01 crc kubenswrapper[4669]: I1210 15:30:01.482921 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" event={"ID":"1ccde319-0785-4191-9b88-407acddb4b2f","Type":"ContainerStarted","Data":"382dd9294ead343345af1f4dca65d66ae48b27d3ee0a00f1cf354cc1404534b8"} Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.772248 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.864149 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ccde319-0785-4191-9b88-407acddb4b2f-secret-volume\") pod \"1ccde319-0785-4191-9b88-407acddb4b2f\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.864340 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ccde319-0785-4191-9b88-407acddb4b2f-config-volume\") pod \"1ccde319-0785-4191-9b88-407acddb4b2f\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.864408 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6xzk\" (UniqueName: \"kubernetes.io/projected/1ccde319-0785-4191-9b88-407acddb4b2f-kube-api-access-z6xzk\") pod \"1ccde319-0785-4191-9b88-407acddb4b2f\" (UID: \"1ccde319-0785-4191-9b88-407acddb4b2f\") " Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.865078 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ccde319-0785-4191-9b88-407acddb4b2f-config-volume" (OuterVolumeSpecName: "config-volume") pod "1ccde319-0785-4191-9b88-407acddb4b2f" (UID: "1ccde319-0785-4191-9b88-407acddb4b2f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.869338 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ccde319-0785-4191-9b88-407acddb4b2f-kube-api-access-z6xzk" (OuterVolumeSpecName: "kube-api-access-z6xzk") pod "1ccde319-0785-4191-9b88-407acddb4b2f" (UID: "1ccde319-0785-4191-9b88-407acddb4b2f"). InnerVolumeSpecName "kube-api-access-z6xzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.872359 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1ccde319-0785-4191-9b88-407acddb4b2f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1ccde319-0785-4191-9b88-407acddb4b2f" (UID: "1ccde319-0785-4191-9b88-407acddb4b2f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.965912 4669 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1ccde319-0785-4191-9b88-407acddb4b2f-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.965951 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6xzk\" (UniqueName: \"kubernetes.io/projected/1ccde319-0785-4191-9b88-407acddb4b2f-kube-api-access-z6xzk\") on node \"crc\" DevicePath \"\"" Dec 10 15:30:02 crc kubenswrapper[4669]: I1210 15:30:02.965964 4669 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1ccde319-0785-4191-9b88-407acddb4b2f-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 15:30:03 crc kubenswrapper[4669]: I1210 15:30:03.496333 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" event={"ID":"1ccde319-0785-4191-9b88-407acddb4b2f","Type":"ContainerDied","Data":"382dd9294ead343345af1f4dca65d66ae48b27d3ee0a00f1cf354cc1404534b8"} Dec 10 15:30:03 crc kubenswrapper[4669]: I1210 15:30:03.496377 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="382dd9294ead343345af1f4dca65d66ae48b27d3ee0a00f1cf354cc1404534b8" Dec 10 15:30:03 crc kubenswrapper[4669]: I1210 15:30:03.496395 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423010-ncrsm" Dec 10 15:30:28 crc kubenswrapper[4669]: I1210 15:30:28.744720 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:30:28 crc kubenswrapper[4669]: I1210 15:30:28.745729 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.011980 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-b99ls"] Dec 10 15:30:54 crc kubenswrapper[4669]: E1210 15:30:54.012823 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ccde319-0785-4191-9b88-407acddb4b2f" containerName="collect-profiles" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.012839 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ccde319-0785-4191-9b88-407acddb4b2f" containerName="collect-profiles" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.012953 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ccde319-0785-4191-9b88-407acddb4b2f" containerName="collect-profiles" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.013454 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-b99ls" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.016174 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-rbjgt"] Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.016806 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-rbjgt" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.019123 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.019152 4669 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-29ll5" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.032325 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-b99ls"] Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.034439 4669 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-66v6l" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.035592 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.038044 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-rbjgt"] Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.047471 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-547d8"] Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.048365 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.054992 4669 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-dcq79" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.059719 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-547d8"] Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.182243 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzg42\" (UniqueName: \"kubernetes.io/projected/0de15170-8813-4dc9-871f-dc6f85a01a7c-kube-api-access-fzg42\") pod \"cert-manager-5b446d88c5-rbjgt\" (UID: \"0de15170-8813-4dc9-871f-dc6f85a01a7c\") " pod="cert-manager/cert-manager-5b446d88c5-rbjgt" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.182420 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpmmf\" (UniqueName: \"kubernetes.io/projected/4a677f57-8f30-4573-84fd-75b837e3be7d-kube-api-access-rpmmf\") pod \"cert-manager-cainjector-7f985d654d-b99ls\" (UID: \"4a677f57-8f30-4573-84fd-75b837e3be7d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-b99ls" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.182499 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qkr8\" (UniqueName: \"kubernetes.io/projected/f3df5787-f3a0-47cb-a6bc-ca2ffe0095e5-kube-api-access-2qkr8\") pod \"cert-manager-webhook-5655c58dd6-547d8\" (UID: \"f3df5787-f3a0-47cb-a6bc-ca2ffe0095e5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" Dec 10 15:30:54 
crc kubenswrapper[4669]: I1210 15:30:54.287639 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qkr8\" (UniqueName: \"kubernetes.io/projected/f3df5787-f3a0-47cb-a6bc-ca2ffe0095e5-kube-api-access-2qkr8\") pod \"cert-manager-webhook-5655c58dd6-547d8\" (UID: \"f3df5787-f3a0-47cb-a6bc-ca2ffe0095e5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.287720 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzg42\" (UniqueName: \"kubernetes.io/projected/0de15170-8813-4dc9-871f-dc6f85a01a7c-kube-api-access-fzg42\") pod \"cert-manager-5b446d88c5-rbjgt\" (UID: \"0de15170-8813-4dc9-871f-dc6f85a01a7c\") " pod="cert-manager/cert-manager-5b446d88c5-rbjgt" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.287781 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpmmf\" (UniqueName: \"kubernetes.io/projected/4a677f57-8f30-4573-84fd-75b837e3be7d-kube-api-access-rpmmf\") pod \"cert-manager-cainjector-7f985d654d-b99ls\" (UID: \"4a677f57-8f30-4573-84fd-75b837e3be7d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-b99ls" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.308801 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpmmf\" (UniqueName: \"kubernetes.io/projected/4a677f57-8f30-4573-84fd-75b837e3be7d-kube-api-access-rpmmf\") pod \"cert-manager-cainjector-7f985d654d-b99ls\" (UID: \"4a677f57-8f30-4573-84fd-75b837e3be7d\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-b99ls" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.310533 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzg42\" (UniqueName: \"kubernetes.io/projected/0de15170-8813-4dc9-871f-dc6f85a01a7c-kube-api-access-fzg42\") pod \"cert-manager-5b446d88c5-rbjgt\" (UID: \"0de15170-8813-4dc9-871f-dc6f85a01a7c\") " pod="cert-manager/cert-manager-5b446d88c5-rbjgt" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.310773 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qkr8\" (UniqueName: \"kubernetes.io/projected/f3df5787-f3a0-47cb-a6bc-ca2ffe0095e5-kube-api-access-2qkr8\") pod \"cert-manager-webhook-5655c58dd6-547d8\" (UID: \"f3df5787-f3a0-47cb-a6bc-ca2ffe0095e5\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.331429 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-b99ls" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.341500 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-rbjgt" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.361299 4669 util.go:30] "No sandbox for pod can be found. 
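The reflector.go:368 "Caches populated" entries above come from client-go reflectors the kubelet runs to mirror just the Secrets and ConfigMaps its pods reference. A minimal sketch of the same list/watch machinery using a shared informer factory; the in-cluster config and the cert-manager namespace are illustrative assumptions, not how the kubelet itself is wired:

    package main

    // Sketch of a client-go informer whose initial sync corresponds to the
    // "Caches populated" lines above. Assumes it runs inside a cluster and
    // watches the cert-manager namespace for illustration.
    import (
    	"fmt"
    	"time"

    	"k8s.io/client-go/informers"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/rest"
    	"k8s.io/client-go/tools/cache"
    )

    func main() {
    	cfg, err := rest.InClusterConfig()
    	if err != nil {
    		panic(err)
    	}
    	client := kubernetes.NewForConfigOrDie(cfg)

    	factory := informers.NewSharedInformerFactoryWithOptions(
    		client, 10*time.Minute, informers.WithNamespace("cert-manager"))
    	secrets := factory.Core().V1().Secrets().Informer()

    	stop := make(chan struct{})
    	defer close(stop)
    	factory.Start(stop)
    	// The point at which a reflector would log that its cache is populated.
    	if !cache.WaitForCacheSync(stop, secrets.HasSynced) {
    		panic("cache never synced")
    	}
    	fmt.Println("secret cache synced")
    }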
Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.592821 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-b99ls"] Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.627926 4669 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.669922 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-547d8"] Dec 10 15:30:54 crc kubenswrapper[4669]: W1210 15:30:54.674806 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3df5787_f3a0_47cb_a6bc_ca2ffe0095e5.slice/crio-6a219123e87d58066db172488a1c914e191c94c6b6617b90de9b235755de881f WatchSource:0}: Error finding container 6a219123e87d58066db172488a1c914e191c94c6b6617b90de9b235755de881f: Status 404 returned error can't find the container with id 6a219123e87d58066db172488a1c914e191c94c6b6617b90de9b235755de881f Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.835741 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-b99ls" event={"ID":"4a677f57-8f30-4573-84fd-75b837e3be7d","Type":"ContainerStarted","Data":"5080f4b27378226c25c5922072b0b3f29a25e34dffa7e8f5b9749d920546e3ba"} Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.836787 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" event={"ID":"f3df5787-f3a0-47cb-a6bc-ca2ffe0095e5","Type":"ContainerStarted","Data":"6a219123e87d58066db172488a1c914e191c94c6b6617b90de9b235755de881f"} Dec 10 15:30:54 crc kubenswrapper[4669]: I1210 15:30:54.862382 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-rbjgt"] Dec 10 15:30:54 crc kubenswrapper[4669]: W1210 15:30:54.868802 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0de15170_8813_4dc9_871f_dc6f85a01a7c.slice/crio-c4c3da4e72ee9d0e931c7d3fc3fa0c86b93d41954b2387f6f7c2148c2821681f WatchSource:0}: Error finding container c4c3da4e72ee9d0e931c7d3fc3fa0c86b93d41954b2387f6f7c2148c2821681f: Status 404 returned error can't find the container with id c4c3da4e72ee9d0e931c7d3fc3fa0c86b93d41954b2387f6f7c2148c2821681f Dec 10 15:30:55 crc kubenswrapper[4669]: I1210 15:30:55.843768 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-rbjgt" event={"ID":"0de15170-8813-4dc9-871f-dc6f85a01a7c","Type":"ContainerStarted","Data":"c4c3da4e72ee9d0e931c7d3fc3fa0c86b93d41954b2387f6f7c2148c2821681f"} Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.745420 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.745790 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.745839 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.746522 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9e8fc93e3f55db44f154c5930fd8404d5d53a93663bdc6cd89f8ba24e2f46edf"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.746593 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://9e8fc93e3f55db44f154c5930fd8404d5d53a93663bdc6cd89f8ba24e2f46edf" gracePeriod=600 Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.871095 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-b99ls" event={"ID":"4a677f57-8f30-4573-84fd-75b837e3be7d","Type":"ContainerStarted","Data":"8d3c8fce117a85c5dfa4a448d42f00b446c7d6bf60df00dca42574131eda06cc"} Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.873424 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-rbjgt" event={"ID":"0de15170-8813-4dc9-871f-dc6f85a01a7c","Type":"ContainerStarted","Data":"ba633fc581a1d126fecc069d4bf019734dc8391d9d392839acf797910d45688d"} Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.878513 4669 generic.go:334] "Generic (PLEG): container finished" podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="9e8fc93e3f55db44f154c5930fd8404d5d53a93663bdc6cd89f8ba24e2f46edf" exitCode=0 Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.878596 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"9e8fc93e3f55db44f154c5930fd8404d5d53a93663bdc6cd89f8ba24e2f46edf"} Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.878627 4669 scope.go:117] "RemoveContainer" containerID="818d0f66bc65130ac95f9c881dc856245c1d0a6c37a935776a040b20d9b6e12a" Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.881082 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" event={"ID":"f3df5787-f3a0-47cb-a6bc-ca2ffe0095e5","Type":"ContainerStarted","Data":"c8134cdd381c829c62fdddad38ec1996d215c95f2d36e05c4816aa8699cb533a"} Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.881576 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.915072 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-b99ls" podStartSLOduration=2.784252568 podStartE2EDuration="5.915055576s" podCreationTimestamp="2025-12-10 15:30:53 +0000 UTC" firstStartedPulling="2025-12-10 15:30:54.627614647 +0000 UTC m=+628.544561274" lastFinishedPulling="2025-12-10 15:30:57.758417655 +0000 UTC m=+631.675364282" observedRunningTime="2025-12-10 15:30:58.893400285 +0000 UTC m=+632.810346912" 
watchObservedRunningTime="2025-12-10 15:30:58.915055576 +0000 UTC m=+632.832002203" Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.916884 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" podStartSLOduration=1.8294902469999998 podStartE2EDuration="4.916875761s" podCreationTimestamp="2025-12-10 15:30:54 +0000 UTC" firstStartedPulling="2025-12-10 15:30:54.677390648 +0000 UTC m=+628.594337275" lastFinishedPulling="2025-12-10 15:30:57.764776162 +0000 UTC m=+631.681722789" observedRunningTime="2025-12-10 15:30:58.9164343 +0000 UTC m=+632.833380947" watchObservedRunningTime="2025-12-10 15:30:58.916875761 +0000 UTC m=+632.833822388" Dec 10 15:30:58 crc kubenswrapper[4669]: I1210 15:30:58.935345 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-rbjgt" podStartSLOduration=2.990402679 podStartE2EDuration="5.935322784s" podCreationTimestamp="2025-12-10 15:30:53 +0000 UTC" firstStartedPulling="2025-12-10 15:30:54.870893869 +0000 UTC m=+628.787840506" lastFinishedPulling="2025-12-10 15:30:57.815813984 +0000 UTC m=+631.732760611" observedRunningTime="2025-12-10 15:30:58.928331993 +0000 UTC m=+632.845278620" watchObservedRunningTime="2025-12-10 15:30:58.935322784 +0000 UTC m=+632.852269411" Dec 10 15:30:59 crc kubenswrapper[4669]: I1210 15:30:59.889208 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"c16924e004007c4e5fe251725834049c68819cdeff3df1d8eef2127a3516ef0e"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.364413 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-547d8" Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.386260 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6hbdc"] Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.386803 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovn-controller" containerID="cri-o://84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad" gracePeriod=30 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.386861 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="northd" containerID="cri-o://623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a" gracePeriod=30 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.386945 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="nbdb" containerID="cri-o://0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff" gracePeriod=30 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.386949 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057" gracePeriod=30 Dec 10 15:31:04 crc kubenswrapper[4669]: 
I1210 15:31:04.386951 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="sbdb" containerID="cri-o://03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6" gracePeriod=30 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.386994 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kube-rbac-proxy-node" containerID="cri-o://43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26" gracePeriod=30 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.387024 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovn-acl-logging" containerID="cri-o://1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81" gracePeriod=30 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.444199 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" containerID="cri-o://b3770a9b96f7ba48f28990dbe8f32b4fa6f0e48d4ed8ea413dbb43316bd7c79b" gracePeriod=30 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.918417 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovnkube-controller/3.log" Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.921594 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovn-acl-logging/0.log" Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922440 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovn-controller/0.log" Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922831 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="b3770a9b96f7ba48f28990dbe8f32b4fa6f0e48d4ed8ea413dbb43316bd7c79b" exitCode=0 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922860 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6" exitCode=0 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922869 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff" exitCode=0 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922876 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a" exitCode=0 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922882 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057" exitCode=0 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922889 4669 generic.go:334] "Generic (PLEG): container finished" 
podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26" exitCode=0 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922896 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81" exitCode=143 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922902 4669 generic.go:334] "Generic (PLEG): container finished" podID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerID="84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad" exitCode=143 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922922 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"b3770a9b96f7ba48f28990dbe8f32b4fa6f0e48d4ed8ea413dbb43316bd7c79b"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922981 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.922999 4669 scope.go:117] "RemoveContainer" containerID="491f95c8936085131d8f242b89d15aa9d10fb3478242014b214bc73aa9d78904" Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.923004 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.923025 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.923044 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.923063 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.923082 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.923102 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.924869 4669 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/2.log" Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.925542 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/1.log" Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.925606 4669 generic.go:334] "Generic (PLEG): container finished" podID="3dda8be1-e5bc-42a3-820e-4285b75bf8c2" containerID="203ebc8b9d6454da6c96a0e98afe28bf69e80814573f297ae868fdd3a15ee9da" exitCode=2 Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.925644 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s4g62" event={"ID":"3dda8be1-e5bc-42a3-820e-4285b75bf8c2","Type":"ContainerDied","Data":"203ebc8b9d6454da6c96a0e98afe28bf69e80814573f297ae868fdd3a15ee9da"} Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.926232 4669 scope.go:117] "RemoveContainer" containerID="203ebc8b9d6454da6c96a0e98afe28bf69e80814573f297ae868fdd3a15ee9da" Dec 10 15:31:04 crc kubenswrapper[4669]: E1210 15:31:04.926513 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-s4g62_openshift-multus(3dda8be1-e5bc-42a3-820e-4285b75bf8c2)\"" pod="openshift-multus/multus-s4g62" podUID="3dda8be1-e5bc-42a3-820e-4285b75bf8c2" Dec 10 15:31:04 crc kubenswrapper[4669]: I1210 15:31:04.967287 4669 scope.go:117] "RemoveContainer" containerID="235e4dd15023530aa902828e052c1ce7456a04455199243c657f833fe3700778" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.141714 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovn-acl-logging/0.log" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.144097 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovn-controller/0.log" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.145955 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.211662 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-pk4k8"] Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.211872 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kube-rbac-proxy-node" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.211884 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kube-rbac-proxy-node" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.211895 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="northd" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.211900 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="northd" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.211909 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.211916 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.211924 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovn-acl-logging" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.211933 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovn-acl-logging" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.211942 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.211950 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.211960 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212047 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.212057 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="nbdb" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212063 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="nbdb" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.212070 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kube-rbac-proxy-ovn-metrics" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212078 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kube-rbac-proxy-ovn-metrics" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.212084 4669 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="sbdb" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212089 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="sbdb" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.212099 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovn-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212104 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovn-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.212112 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kubecfg-setup" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212118 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kubecfg-setup" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.212127 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212133 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212241 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="nbdb" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212252 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kube-rbac-proxy-node" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212263 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212272 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="kube-rbac-proxy-ovn-metrics" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212279 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovn-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212286 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212292 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212300 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovn-acl-logging" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212308 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212314 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 
15:31:05.212323 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="northd" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212330 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="sbdb" Dec 10 15:31:05 crc kubenswrapper[4669]: E1210 15:31:05.212422 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.212429 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" containerName="ovnkube-controller" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.213884 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.333801 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-ovn\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.333840 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-log-socket\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.333863 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-systemd-units\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.333896 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovn-node-metrics-cert\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.333921 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-node-log\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.334878 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.334912 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-script-lib\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.334928 4669 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-kubelet\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.334957 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-slash\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.334982 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-openvswitch\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.333997 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335006 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-88p9n\" (UniqueName: \"kubernetes.io/projected/8dc35dac-41a2-4bc1-ad26-5f515126921e-kube-api-access-88p9n\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.334004 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-log-socket" (OuterVolumeSpecName: "log-socket") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335005 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.334069 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-node-log" (OuterVolumeSpecName: "node-log") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335051 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.334063 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335072 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335021 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-bin\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.334973 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335153 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-netd\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335177 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-etc-openvswitch\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335202 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-netns\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335243 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-systemd\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335258 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). 
InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335276 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-config\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335359 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-ovn-kubernetes\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335383 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-var-lib-openvswitch\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335404 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-env-overrides\") pod \"8dc35dac-41a2-4bc1-ad26-5f515126921e\" (UID: \"8dc35dac-41a2-4bc1-ad26-5f515126921e\") " Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335290 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335303 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "etc-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335544 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-log-socket\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335565 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-cni-netd\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335563 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-slash" (OuterVolumeSpecName: "host-slash") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335576 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335591 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335580 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-node-log\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335627 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335643 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-systemd-units\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335662 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-cni-bin\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335719 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335797 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-run-ovn-kubernetes\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335858 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/90b3fc9c-6e1a-4378-a056-0eb94b83e775-ovnkube-script-lib\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335876 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335902 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/90b3fc9c-6e1a-4378-a056-0eb94b83e775-ovn-node-metrics-cert\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335961 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-etc-openvswitch\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.335994 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-slash\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336026 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/90b3fc9c-6e1a-4378-a056-0eb94b83e775-env-overrides\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336039 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336129 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/90b3fc9c-6e1a-4378-a056-0eb94b83e775-ovnkube-config\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336160 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-run-netns\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336176 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-run-ovn\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336195 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-run-systemd\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336247 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-run-openvswitch\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336268 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zlvvz\" (UniqueName: \"kubernetes.io/projected/90b3fc9c-6e1a-4378-a056-0eb94b83e775-kube-api-access-zlvvz\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336288 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-var-lib-openvswitch\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336304 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-kubelet\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336377 4669 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-node-log\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 
crc kubenswrapper[4669]: I1210 15:31:05.336387 4669 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336397 4669 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336405 4669 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336414 4669 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-slash\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336422 4669 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336430 4669 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336438 4669 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336445 4669 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336453 4669 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336460 4669 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336467 4669 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336476 4669 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336483 4669 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/8dc35dac-41a2-4bc1-ad26-5f515126921e-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 10 
15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336498 4669 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336506 4669 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-log-socket\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.336513 4669 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.339913 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.340161 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8dc35dac-41a2-4bc1-ad26-5f515126921e-kube-api-access-88p9n" (OuterVolumeSpecName: "kube-api-access-88p9n") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "kube-api-access-88p9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.346975 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "8dc35dac-41a2-4bc1-ad26-5f515126921e" (UID: "8dc35dac-41a2-4bc1-ad26-5f515126921e"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437613 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-run-ovn-kubernetes\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437658 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/90b3fc9c-6e1a-4378-a056-0eb94b83e775-ovnkube-script-lib\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437677 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/90b3fc9c-6e1a-4378-a056-0eb94b83e775-ovn-node-metrics-cert\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437698 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-etc-openvswitch\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437725 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-slash\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437746 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/90b3fc9c-6e1a-4378-a056-0eb94b83e775-env-overrides\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437747 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-run-ovn-kubernetes\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437777 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/90b3fc9c-6e1a-4378-a056-0eb94b83e775-ovnkube-config\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437808 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-slash\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc 
kubenswrapper[4669]: I1210 15:31:05.437822 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-run-netns\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437936 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-run-netns\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437950 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-etc-openvswitch\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437980 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-run-ovn\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.437965 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-run-ovn\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438027 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-run-systemd\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438061 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-run-openvswitch\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438091 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zlvvz\" (UniqueName: \"kubernetes.io/projected/90b3fc9c-6e1a-4378-a056-0eb94b83e775-kube-api-access-zlvvz\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438115 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-var-lib-openvswitch\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438134 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"run-systemd\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-run-systemd\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438185 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-kubelet\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438823 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-log-socket\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438855 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-node-log\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438888 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-cni-netd\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438925 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-systemd-units\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438944 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-cni-bin\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.438966 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439055 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439077 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: 
\"kubernetes.io/configmap/90b3fc9c-6e1a-4378-a056-0eb94b83e775-env-overrides\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439209 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/90b3fc9c-6e1a-4378-a056-0eb94b83e775-ovnkube-config\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439330 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/90b3fc9c-6e1a-4378-a056-0eb94b83e775-ovnkube-script-lib\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439384 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-cni-bin\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439390 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-log-socket\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439419 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-node-log\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439453 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-cni-netd\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439481 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-systemd-units\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439508 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-host-kubelet\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439547 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-var-lib-openvswitch\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439647 4669 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/8dc35dac-41a2-4bc1-ad26-5f515126921e-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439665 4669 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/8dc35dac-41a2-4bc1-ad26-5f515126921e-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.439682 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-88p9n\" (UniqueName: \"kubernetes.io/projected/8dc35dac-41a2-4bc1-ad26-5f515126921e-kube-api-access-88p9n\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.441129 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/90b3fc9c-6e1a-4378-a056-0eb94b83e775-ovn-node-metrics-cert\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.448591 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/90b3fc9c-6e1a-4378-a056-0eb94b83e775-run-openvswitch\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.462452 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zlvvz\" (UniqueName: \"kubernetes.io/projected/90b3fc9c-6e1a-4378-a056-0eb94b83e775-kube-api-access-zlvvz\") pod \"ovnkube-node-pk4k8\" (UID: \"90b3fc9c-6e1a-4378-a056-0eb94b83e775\") " pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.529372 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.931003 4669 generic.go:334] "Generic (PLEG): container finished" podID="90b3fc9c-6e1a-4378-a056-0eb94b83e775" containerID="2e426122ac3a9cd0412ea64c1aa181dcb8d76c2db8b06dad6f9470cb8d0707d1" exitCode=0 Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.931054 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerDied","Data":"2e426122ac3a9cd0412ea64c1aa181dcb8d76c2db8b06dad6f9470cb8d0707d1"} Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.931078 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerStarted","Data":"9050587871bf5f420ee888543b8f165b6afaf7b86d8c5682f0b636b5b0e849bf"} Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.935922 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovn-acl-logging/0.log" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.936438 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-6hbdc_8dc35dac-41a2-4bc1-ad26-5f515126921e/ovn-controller/0.log" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.936849 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" event={"ID":"8dc35dac-41a2-4bc1-ad26-5f515126921e","Type":"ContainerDied","Data":"8069245d67d54f793c1dacdded1d83c098ff626276b86e3a9ed3336eac943993"} Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.936890 4669 scope.go:117] "RemoveContainer" containerID="b3770a9b96f7ba48f28990dbe8f32b4fa6f0e48d4ed8ea413dbb43316bd7c79b" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.936963 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-6hbdc" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.938495 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/2.log" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.955159 4669 scope.go:117] "RemoveContainer" containerID="03e6bc7b3b5c2963591bd6221932dab35720fa6f219cb68ea8baf4d60fcde0b6" Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.976398 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6hbdc"] Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.986366 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-6hbdc"] Dec 10 15:31:05 crc kubenswrapper[4669]: I1210 15:31:05.989575 4669 scope.go:117] "RemoveContainer" containerID="0e8c83184a29a08fa95f551b42c027ed354b1707ae69a0b83fcaf80248a2d8ff" Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.015466 4669 scope.go:117] "RemoveContainer" containerID="623ddcb7b83bf8bc38ddadef9652d51a2af985463094787b8fb31588bfc3de6a" Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.039777 4669 scope.go:117] "RemoveContainer" containerID="ed529179c7b17ac60a4c39ea93cf635c36275241c348e0de2c5efd1204fb2057" Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.073834 4669 scope.go:117] "RemoveContainer" containerID="43a102b0cb64a752f6fd2893a7ae952be677526c77fa37a6bc07a36f07727d26" Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.089789 4669 scope.go:117] "RemoveContainer" containerID="1a808a1957e9cb81ce9f8a16629e8ad5bf7a0ef8969d535b7aecf8da3c1bdf81" Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.102607 4669 scope.go:117] "RemoveContainer" containerID="84c9eb517977a01bd4717006e888579bc2b35ab494c64cb2a23af83339aa7fad" Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.116670 4669 scope.go:117] "RemoveContainer" containerID="37fdd3c9bf40823b08105867cd5e553e02094b2f7b073e9a2d584c6c0d8b593d" Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.404058 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8dc35dac-41a2-4bc1-ad26-5f515126921e" path="/var/lib/kubelet/pods/8dc35dac-41a2-4bc1-ad26-5f515126921e/volumes" Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.949488 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerStarted","Data":"4908f2cdbfbe9c2532121da14bed7ce1942289dbb0ba4ad830bd34adc394df74"} Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.949535 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerStarted","Data":"571d80851fb89836a4cf9495fb6eead6914b6993e02411ede18742c6a128552b"} Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.949548 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerStarted","Data":"44d60c5b8a4a1ac6dea281c8c45d4e8a3356bc5bd43cdaff642882371e583006"} Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.949559 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" 
event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerStarted","Data":"a86dd0c5be198e270cccf0d8693d8e9eb0db8d3c81f9189e0b78311969ba89c0"} Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.949570 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerStarted","Data":"065744a607de6935918443a8ba8359017919fbf7058530b53b285c0f9ba6ed3c"} Dec 10 15:31:06 crc kubenswrapper[4669]: I1210 15:31:06.949579 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerStarted","Data":"64ee2f414eabcc11147fce675326c7789bc488c3db1b4b37920b65306c93f226"} Dec 10 15:31:08 crc kubenswrapper[4669]: I1210 15:31:08.963535 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerStarted","Data":"5102e2d7d4eb5e21bd43adfba1a64f44a52fcc4cc75661f237a3a4c166783320"} Dec 10 15:31:11 crc kubenswrapper[4669]: I1210 15:31:11.983655 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" event={"ID":"90b3fc9c-6e1a-4378-a056-0eb94b83e775","Type":"ContainerStarted","Data":"4552626804d1cd90000fa679fca5b87097cfb183aed86d4ac08ca8146040fdd9"} Dec 10 15:31:11 crc kubenswrapper[4669]: I1210 15:31:11.983958 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:11 crc kubenswrapper[4669]: I1210 15:31:11.983973 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:11 crc kubenswrapper[4669]: I1210 15:31:11.983981 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:12 crc kubenswrapper[4669]: I1210 15:31:12.024586 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" podStartSLOduration=7.024564816 podStartE2EDuration="7.024564816s" podCreationTimestamp="2025-12-10 15:31:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:31:12.019286516 +0000 UTC m=+645.936233143" watchObservedRunningTime="2025-12-10 15:31:12.024564816 +0000 UTC m=+645.941511443" Dec 10 15:31:12 crc kubenswrapper[4669]: I1210 15:31:12.027536 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:12 crc kubenswrapper[4669]: I1210 15:31:12.029382 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:15 crc kubenswrapper[4669]: I1210 15:31:15.397967 4669 scope.go:117] "RemoveContainer" containerID="203ebc8b9d6454da6c96a0e98afe28bf69e80814573f297ae868fdd3a15ee9da" Dec 10 15:31:15 crc kubenswrapper[4669]: E1210 15:31:15.398624 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-s4g62_openshift-multus(3dda8be1-e5bc-42a3-820e-4285b75bf8c2)\"" pod="openshift-multus/multus-s4g62" podUID="3dda8be1-e5bc-42a3-820e-4285b75bf8c2" Dec 10 15:31:30 crc kubenswrapper[4669]: 
I1210 15:31:30.398759 4669 scope.go:117] "RemoveContainer" containerID="203ebc8b9d6454da6c96a0e98afe28bf69e80814573f297ae868fdd3a15ee9da" Dec 10 15:31:31 crc kubenswrapper[4669]: I1210 15:31:31.106649 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-s4g62_3dda8be1-e5bc-42a3-820e-4285b75bf8c2/kube-multus/2.log" Dec 10 15:31:31 crc kubenswrapper[4669]: I1210 15:31:31.107049 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-s4g62" event={"ID":"3dda8be1-e5bc-42a3-820e-4285b75bf8c2","Type":"ContainerStarted","Data":"a8a813fc1000cc043b263186ffe6ad57e5d9e11addbd8372983e39b0f3c8294c"} Dec 10 15:31:35 crc kubenswrapper[4669]: I1210 15:31:35.555833 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-pk4k8" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.349232 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4"] Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.350563 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.353665 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.368366 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4"] Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.473302 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.473389 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz6px\" (UniqueName: \"kubernetes.io/projected/e5b4e814-6b86-460c-9e84-130e84192f88-kube-api-access-dz6px\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.473414 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.574085 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " 
pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.574191 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz6px\" (UniqueName: \"kubernetes.io/projected/e5b4e814-6b86-460c-9e84-130e84192f88-kube-api-access-dz6px\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.574251 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.575051 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.575579 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.594248 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz6px\" (UniqueName: \"kubernetes.io/projected/e5b4e814-6b86-460c-9e84-130e84192f88-kube-api-access-dz6px\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.667503 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:45 crc kubenswrapper[4669]: I1210 15:31:45.862693 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4"] Dec 10 15:31:46 crc kubenswrapper[4669]: I1210 15:31:46.224209 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" event={"ID":"e5b4e814-6b86-460c-9e84-130e84192f88","Type":"ContainerStarted","Data":"f60db5533543e3f261ec3cecc75c2eb15393d4028435a709317066e1adf9b083"} Dec 10 15:31:46 crc kubenswrapper[4669]: I1210 15:31:46.224285 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" event={"ID":"e5b4e814-6b86-460c-9e84-130e84192f88","Type":"ContainerStarted","Data":"fbc574a0ce6c480e1b61da3f8b13bd8094f3bcc3507b51666a5a22892eaa2fa1"} Dec 10 15:31:47 crc kubenswrapper[4669]: I1210 15:31:47.229626 4669 generic.go:334] "Generic (PLEG): container finished" podID="e5b4e814-6b86-460c-9e84-130e84192f88" containerID="f60db5533543e3f261ec3cecc75c2eb15393d4028435a709317066e1adf9b083" exitCode=0 Dec 10 15:31:47 crc kubenswrapper[4669]: I1210 15:31:47.229691 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" event={"ID":"e5b4e814-6b86-460c-9e84-130e84192f88","Type":"ContainerDied","Data":"f60db5533543e3f261ec3cecc75c2eb15393d4028435a709317066e1adf9b083"} Dec 10 15:31:49 crc kubenswrapper[4669]: I1210 15:31:49.247680 4669 generic.go:334] "Generic (PLEG): container finished" podID="e5b4e814-6b86-460c-9e84-130e84192f88" containerID="a86ef170e7246faef837b2307329ae06585850e8fad5732ae0782e0828628e34" exitCode=0 Dec 10 15:31:49 crc kubenswrapper[4669]: I1210 15:31:49.247826 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" event={"ID":"e5b4e814-6b86-460c-9e84-130e84192f88","Type":"ContainerDied","Data":"a86ef170e7246faef837b2307329ae06585850e8fad5732ae0782e0828628e34"} Dec 10 15:31:50 crc kubenswrapper[4669]: I1210 15:31:50.258712 4669 generic.go:334] "Generic (PLEG): container finished" podID="e5b4e814-6b86-460c-9e84-130e84192f88" containerID="2cd8ad75759ab75cc0bfee03670914c8a21c629363fca15516e73d3655e9a7ed" exitCode=0 Dec 10 15:31:50 crc kubenswrapper[4669]: I1210 15:31:50.259081 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" event={"ID":"e5b4e814-6b86-460c-9e84-130e84192f88","Type":"ContainerDied","Data":"2cd8ad75759ab75cc0bfee03670914c8a21c629363fca15516e73d3655e9a7ed"} Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.578002 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.650178 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dz6px\" (UniqueName: \"kubernetes.io/projected/e5b4e814-6b86-460c-9e84-130e84192f88-kube-api-access-dz6px\") pod \"e5b4e814-6b86-460c-9e84-130e84192f88\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.650302 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-util\") pod \"e5b4e814-6b86-460c-9e84-130e84192f88\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.650350 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-bundle\") pod \"e5b4e814-6b86-460c-9e84-130e84192f88\" (UID: \"e5b4e814-6b86-460c-9e84-130e84192f88\") " Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.650966 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-bundle" (OuterVolumeSpecName: "bundle") pod "e5b4e814-6b86-460c-9e84-130e84192f88" (UID: "e5b4e814-6b86-460c-9e84-130e84192f88"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.659280 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5b4e814-6b86-460c-9e84-130e84192f88-kube-api-access-dz6px" (OuterVolumeSpecName: "kube-api-access-dz6px") pod "e5b4e814-6b86-460c-9e84-130e84192f88" (UID: "e5b4e814-6b86-460c-9e84-130e84192f88"). InnerVolumeSpecName "kube-api-access-dz6px". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.665910 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-util" (OuterVolumeSpecName: "util") pod "e5b4e814-6b86-460c-9e84-130e84192f88" (UID: "e5b4e814-6b86-460c-9e84-130e84192f88"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.751092 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dz6px\" (UniqueName: \"kubernetes.io/projected/e5b4e814-6b86-460c-9e84-130e84192f88-kube-api-access-dz6px\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.751410 4669 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-util\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:51 crc kubenswrapper[4669]: I1210 15:31:51.751486 4669 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/e5b4e814-6b86-460c-9e84-130e84192f88-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:31:52 crc kubenswrapper[4669]: I1210 15:31:52.274840 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" event={"ID":"e5b4e814-6b86-460c-9e84-130e84192f88","Type":"ContainerDied","Data":"fbc574a0ce6c480e1b61da3f8b13bd8094f3bcc3507b51666a5a22892eaa2fa1"} Dec 10 15:31:52 crc kubenswrapper[4669]: I1210 15:31:52.274874 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fbc574a0ce6c480e1b61da3f8b13bd8094f3bcc3507b51666a5a22892eaa2fa1" Dec 10 15:31:52 crc kubenswrapper[4669]: I1210 15:31:52.274919 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.795897 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7"] Dec 10 15:31:53 crc kubenswrapper[4669]: E1210 15:31:53.797196 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b4e814-6b86-460c-9e84-130e84192f88" containerName="util" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.797344 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b4e814-6b86-460c-9e84-130e84192f88" containerName="util" Dec 10 15:31:53 crc kubenswrapper[4669]: E1210 15:31:53.797400 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b4e814-6b86-460c-9e84-130e84192f88" containerName="pull" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.797452 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b4e814-6b86-460c-9e84-130e84192f88" containerName="pull" Dec 10 15:31:53 crc kubenswrapper[4669]: E1210 15:31:53.797501 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b4e814-6b86-460c-9e84-130e84192f88" containerName="extract" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.797549 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b4e814-6b86-460c-9e84-130e84192f88" containerName="extract" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.797673 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5b4e814-6b86-460c-9e84-130e84192f88" containerName="extract" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.798074 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.800483 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.800950 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.804332 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-mq5vq" Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.819074 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7"] Dec 10 15:31:53 crc kubenswrapper[4669]: I1210 15:31:53.977518 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpxv5\" (UniqueName: \"kubernetes.io/projected/41cd6a49-032a-4f5a-988b-1af1808b5ae5-kube-api-access-gpxv5\") pod \"nmstate-operator-5b5b58f5c8-ghbc7\" (UID: \"41cd6a49-032a-4f5a-988b-1af1808b5ae5\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7" Dec 10 15:31:54 crc kubenswrapper[4669]: I1210 15:31:54.078181 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpxv5\" (UniqueName: \"kubernetes.io/projected/41cd6a49-032a-4f5a-988b-1af1808b5ae5-kube-api-access-gpxv5\") pod \"nmstate-operator-5b5b58f5c8-ghbc7\" (UID: \"41cd6a49-032a-4f5a-988b-1af1808b5ae5\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7" Dec 10 15:31:54 crc kubenswrapper[4669]: I1210 15:31:54.099473 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpxv5\" (UniqueName: \"kubernetes.io/projected/41cd6a49-032a-4f5a-988b-1af1808b5ae5-kube-api-access-gpxv5\") pod \"nmstate-operator-5b5b58f5c8-ghbc7\" (UID: \"41cd6a49-032a-4f5a-988b-1af1808b5ae5\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7" Dec 10 15:31:54 crc kubenswrapper[4669]: I1210 15:31:54.111064 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7" Dec 10 15:31:54 crc kubenswrapper[4669]: I1210 15:31:54.582035 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7"] Dec 10 15:31:55 crc kubenswrapper[4669]: I1210 15:31:55.290960 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7" event={"ID":"41cd6a49-032a-4f5a-988b-1af1808b5ae5","Type":"ContainerStarted","Data":"f4182d2e575b7f85e10556d553ea829195811e996d423e191c84f3469ee0bafd"} Dec 10 15:31:58 crc kubenswrapper[4669]: I1210 15:31:58.317540 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7" event={"ID":"41cd6a49-032a-4f5a-988b-1af1808b5ae5","Type":"ContainerStarted","Data":"04e8f0e6c735a4df86614504f36360aea96f0e5c78f059fa6d4851c99790d4d2"} Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.465932 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-ghbc7" podStartSLOduration=3.876969629 podStartE2EDuration="6.465916979s" podCreationTimestamp="2025-12-10 15:31:53 +0000 UTC" firstStartedPulling="2025-12-10 15:31:54.616058236 +0000 UTC m=+688.533004863" lastFinishedPulling="2025-12-10 15:31:57.205005586 +0000 UTC m=+691.121952213" observedRunningTime="2025-12-10 15:31:58.344429084 +0000 UTC m=+692.261375741" watchObservedRunningTime="2025-12-10 15:31:59.465916979 +0000 UTC m=+693.382863606" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.467260 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl"] Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.468019 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.472110 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-f5jkl" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.483735 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl"] Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.494491 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd"] Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.495386 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.498137 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.514568 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd"] Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.556941 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-bh52g"] Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.557590 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.627341 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8"] Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.627951 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.630522 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-jfnpz" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.630727 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.631024 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.644427 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5zsd\" (UniqueName: \"kubernetes.io/projected/b9806903-d8e2-4131-9bf1-14d0e1c7597b-kube-api-access-l5zsd\") pod \"nmstate-metrics-7f946cbc9-9vchl\" (UID: \"b9806903-d8e2-4131-9bf1-14d0e1c7597b\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.644473 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/05ce5aaa-1c86-4f42-add8-f24bb1789c02-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-vl7cd\" (UID: \"05ce5aaa-1c86-4f42-add8-f24bb1789c02\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.644540 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6dz9j\" (UniqueName: \"kubernetes.io/projected/05ce5aaa-1c86-4f42-add8-f24bb1789c02-kube-api-access-6dz9j\") pod \"nmstate-webhook-5f6d4c5ccb-vl7cd\" (UID: \"05ce5aaa-1c86-4f42-add8-f24bb1789c02\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.672754 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8"] Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745451 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5zsd\" (UniqueName: \"kubernetes.io/projected/b9806903-d8e2-4131-9bf1-14d0e1c7597b-kube-api-access-l5zsd\") pod \"nmstate-metrics-7f946cbc9-9vchl\" (UID: \"b9806903-d8e2-4131-9bf1-14d0e1c7597b\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745498 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/05ce5aaa-1c86-4f42-add8-f24bb1789c02-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-vl7cd\" (UID: \"05ce5aaa-1c86-4f42-add8-f24bb1789c02\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745531 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdb5x\" (UniqueName: \"kubernetes.io/projected/82533b7d-e2b7-46f8-9522-e62da9f8f8dd-kube-api-access-cdb5x\") pod 
\"nmstate-console-plugin-7fbb5f6569-cr4c8\" (UID: \"82533b7d-e2b7-46f8-9522-e62da9f8f8dd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745577 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/19ab958b-08b5-444a-a29b-b9e787c0bd29-dbus-socket\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745593 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m99gb\" (UniqueName: \"kubernetes.io/projected/19ab958b-08b5-444a-a29b-b9e787c0bd29-kube-api-access-m99gb\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745610 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/19ab958b-08b5-444a-a29b-b9e787c0bd29-ovs-socket\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745632 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/82533b7d-e2b7-46f8-9522-e62da9f8f8dd-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-cr4c8\" (UID: \"82533b7d-e2b7-46f8-9522-e62da9f8f8dd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745742 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6dz9j\" (UniqueName: \"kubernetes.io/projected/05ce5aaa-1c86-4f42-add8-f24bb1789c02-kube-api-access-6dz9j\") pod \"nmstate-webhook-5f6d4c5ccb-vl7cd\" (UID: \"05ce5aaa-1c86-4f42-add8-f24bb1789c02\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745766 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/19ab958b-08b5-444a-a29b-b9e787c0bd29-nmstate-lock\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.745886 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/82533b7d-e2b7-46f8-9522-e62da9f8f8dd-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-cr4c8\" (UID: \"82533b7d-e2b7-46f8-9522-e62da9f8f8dd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.754957 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/05ce5aaa-1c86-4f42-add8-f24bb1789c02-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-vl7cd\" (UID: \"05ce5aaa-1c86-4f42-add8-f24bb1789c02\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.764848 4669 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6dz9j\" (UniqueName: \"kubernetes.io/projected/05ce5aaa-1c86-4f42-add8-f24bb1789c02-kube-api-access-6dz9j\") pod \"nmstate-webhook-5f6d4c5ccb-vl7cd\" (UID: \"05ce5aaa-1c86-4f42-add8-f24bb1789c02\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.768090 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5zsd\" (UniqueName: \"kubernetes.io/projected/b9806903-d8e2-4131-9bf1-14d0e1c7597b-kube-api-access-l5zsd\") pod \"nmstate-metrics-7f946cbc9-9vchl\" (UID: \"b9806903-d8e2-4131-9bf1-14d0e1c7597b\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.789287 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.809135 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.838821 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-785d49f475-nnqp4"] Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.840317 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.847359 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/19ab958b-08b5-444a-a29b-b9e787c0bd29-dbus-socket\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.847579 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m99gb\" (UniqueName: \"kubernetes.io/projected/19ab958b-08b5-444a-a29b-b9e787c0bd29-kube-api-access-m99gb\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.847720 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/19ab958b-08b5-444a-a29b-b9e787c0bd29-ovs-socket\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.847818 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/82533b7d-e2b7-46f8-9522-e62da9f8f8dd-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-cr4c8\" (UID: \"82533b7d-e2b7-46f8-9522-e62da9f8f8dd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.847658 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/19ab958b-08b5-444a-a29b-b9e787c0bd29-dbus-socket\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.847945 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" 
(UniqueName: \"kubernetes.io/host-path/19ab958b-08b5-444a-a29b-b9e787c0bd29-ovs-socket\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.848009 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/19ab958b-08b5-444a-a29b-b9e787c0bd29-nmstate-lock\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.848094 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/82533b7d-e2b7-46f8-9522-e62da9f8f8dd-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-cr4c8\" (UID: \"82533b7d-e2b7-46f8-9522-e62da9f8f8dd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.848206 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdb5x\" (UniqueName: \"kubernetes.io/projected/82533b7d-e2b7-46f8-9522-e62da9f8f8dd-kube-api-access-cdb5x\") pod \"nmstate-console-plugin-7fbb5f6569-cr4c8\" (UID: \"82533b7d-e2b7-46f8-9522-e62da9f8f8dd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.848700 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/19ab958b-08b5-444a-a29b-b9e787c0bd29-nmstate-lock\") pod \"nmstate-handler-bh52g\" (UID: \"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.848746 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/82533b7d-e2b7-46f8-9522-e62da9f8f8dd-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-cr4c8\" (UID: \"82533b7d-e2b7-46f8-9522-e62da9f8f8dd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.863304 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/82533b7d-e2b7-46f8-9522-e62da9f8f8dd-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-cr4c8\" (UID: \"82533b7d-e2b7-46f8-9522-e62da9f8f8dd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.872692 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-785d49f475-nnqp4"] Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.887335 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cdb5x\" (UniqueName: \"kubernetes.io/projected/82533b7d-e2b7-46f8-9522-e62da9f8f8dd-kube-api-access-cdb5x\") pod \"nmstate-console-plugin-7fbb5f6569-cr4c8\" (UID: \"82533b7d-e2b7-46f8-9522-e62da9f8f8dd\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.906379 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m99gb\" (UniqueName: \"kubernetes.io/projected/19ab958b-08b5-444a-a29b-b9e787c0bd29-kube-api-access-m99gb\") pod \"nmstate-handler-bh52g\" (UID: 
\"19ab958b-08b5-444a-a29b-b9e787c0bd29\") " pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.945373 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.949023 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-trusted-ca-bundle\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.949070 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8912ac2f-2f20-4f74-92ab-4cc7e0963641-console-serving-cert\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.949085 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-oauth-serving-cert\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.949137 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-service-ca\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.949169 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-console-config\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.949204 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ch25v\" (UniqueName: \"kubernetes.io/projected/8912ac2f-2f20-4f74-92ab-4cc7e0963641-kube-api-access-ch25v\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:31:59 crc kubenswrapper[4669]: I1210 15:31:59.949305 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8912ac2f-2f20-4f74-92ab-4cc7e0963641-console-oauth-config\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.050905 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ch25v\" (UniqueName: \"kubernetes.io/projected/8912ac2f-2f20-4f74-92ab-4cc7e0963641-kube-api-access-ch25v\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " 
pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.050971 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8912ac2f-2f20-4f74-92ab-4cc7e0963641-console-oauth-config\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.051003 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-trusted-ca-bundle\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.051045 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8912ac2f-2f20-4f74-92ab-4cc7e0963641-console-serving-cert\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.051060 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-oauth-serving-cert\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.051133 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-service-ca\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.051159 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-console-config\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.052074 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-console-config\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.053103 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-trusted-ca-bundle\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.053666 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-oauth-serving-cert\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " 
pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.054293 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8912ac2f-2f20-4f74-92ab-4cc7e0963641-service-ca\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.061926 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/8912ac2f-2f20-4f74-92ab-4cc7e0963641-console-serving-cert\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.062406 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/8912ac2f-2f20-4f74-92ab-4cc7e0963641-console-oauth-config\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.077669 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ch25v\" (UniqueName: \"kubernetes.io/projected/8912ac2f-2f20-4f74-92ab-4cc7e0963641-kube-api-access-ch25v\") pod \"console-785d49f475-nnqp4\" (UID: \"8912ac2f-2f20-4f74-92ab-4cc7e0963641\") " pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.139365 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd"] Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.176680 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.198046 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.203807 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl"] Dec 10 15:32:00 crc kubenswrapper[4669]: W1210 15:32:00.215403 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod19ab958b_08b5_444a_a29b_b9e787c0bd29.slice/crio-2f396f54b7ea44a657c14502b76091880250f029407316f3be778946d339851f WatchSource:0}: Error finding container 2f396f54b7ea44a657c14502b76091880250f029407316f3be778946d339851f: Status 404 returned error can't find the container with id 2f396f54b7ea44a657c14502b76091880250f029407316f3be778946d339851f Dec 10 15:32:00 crc kubenswrapper[4669]: W1210 15:32:00.219874 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb9806903_d8e2_4131_9bf1_14d0e1c7597b.slice/crio-c3d5211c613d9342f11a9933650fdaee46e4a3fd5dc39fe4da794c8078eeca8e WatchSource:0}: Error finding container c3d5211c613d9342f11a9933650fdaee46e4a3fd5dc39fe4da794c8078eeca8e: Status 404 returned error can't find the container with id c3d5211c613d9342f11a9933650fdaee46e4a3fd5dc39fe4da794c8078eeca8e Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.253950 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8"] Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.328589 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl" event={"ID":"b9806903-d8e2-4131-9bf1-14d0e1c7597b","Type":"ContainerStarted","Data":"c3d5211c613d9342f11a9933650fdaee46e4a3fd5dc39fe4da794c8078eeca8e"} Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.329714 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-bh52g" event={"ID":"19ab958b-08b5-444a-a29b-b9e787c0bd29","Type":"ContainerStarted","Data":"2f396f54b7ea44a657c14502b76091880250f029407316f3be778946d339851f"} Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.330924 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" event={"ID":"05ce5aaa-1c86-4f42-add8-f24bb1789c02","Type":"ContainerStarted","Data":"227b7e437690e6ba8b5d9adc01b286e545331c4837315b5cf4699279f43fefea"} Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.331958 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" event={"ID":"82533b7d-e2b7-46f8-9522-e62da9f8f8dd","Type":"ContainerStarted","Data":"6303279651d6edc17d22d6d6bdb0aa289c1032a7172ca9cc7664d09fa4037c0e"} Dec 10 15:32:00 crc kubenswrapper[4669]: I1210 15:32:00.411636 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-785d49f475-nnqp4"] Dec 10 15:32:00 crc kubenswrapper[4669]: W1210 15:32:00.412779 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8912ac2f_2f20_4f74_92ab_4cc7e0963641.slice/crio-581ac1e5066eb881f1d4d5b383d89d9c72709386f2753c7bd208aa67091be2bd WatchSource:0}: Error finding container 581ac1e5066eb881f1d4d5b383d89d9c72709386f2753c7bd208aa67091be2bd: Status 404 returned error can't find the container with id 581ac1e5066eb881f1d4d5b383d89d9c72709386f2753c7bd208aa67091be2bd Dec 10 15:32:01 crc 
kubenswrapper[4669]: I1210 15:32:01.338529 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-785d49f475-nnqp4" event={"ID":"8912ac2f-2f20-4f74-92ab-4cc7e0963641","Type":"ContainerStarted","Data":"8b887e5b274c36ebc6cea5278b45624c15336c846d4df0ce2c296ae1052c57c6"} Dec 10 15:32:01 crc kubenswrapper[4669]: I1210 15:32:01.338871 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-785d49f475-nnqp4" event={"ID":"8912ac2f-2f20-4f74-92ab-4cc7e0963641","Type":"ContainerStarted","Data":"581ac1e5066eb881f1d4d5b383d89d9c72709386f2753c7bd208aa67091be2bd"} Dec 10 15:32:01 crc kubenswrapper[4669]: I1210 15:32:01.361695 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-785d49f475-nnqp4" podStartSLOduration=2.361671336 podStartE2EDuration="2.361671336s" podCreationTimestamp="2025-12-10 15:31:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:32:01.354856073 +0000 UTC m=+695.271802700" watchObservedRunningTime="2025-12-10 15:32:01.361671336 +0000 UTC m=+695.278617963" Dec 10 15:32:03 crc kubenswrapper[4669]: I1210 15:32:03.355316 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" event={"ID":"05ce5aaa-1c86-4f42-add8-f24bb1789c02","Type":"ContainerStarted","Data":"908fc37a6d432a2faaddbb8c18589f82673d0760144b2cf02cb3f8abe5b077be"} Dec 10 15:32:03 crc kubenswrapper[4669]: I1210 15:32:03.355757 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:32:03 crc kubenswrapper[4669]: I1210 15:32:03.358793 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl" event={"ID":"b9806903-d8e2-4131-9bf1-14d0e1c7597b","Type":"ContainerStarted","Data":"587caa90e7e3a5215b628b7488cd31cd5c2392f792c8659cccc95c5bbdef2c7d"} Dec 10 15:32:03 crc kubenswrapper[4669]: I1210 15:32:03.360408 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-bh52g" event={"ID":"19ab958b-08b5-444a-a29b-b9e787c0bd29","Type":"ContainerStarted","Data":"463b24c63525385562c885b391875cf507e9edce9804f40fe799cdcdcd6245c4"} Dec 10 15:32:03 crc kubenswrapper[4669]: I1210 15:32:03.360530 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:32:03 crc kubenswrapper[4669]: I1210 15:32:03.377634 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" podStartSLOduration=2.014255339 podStartE2EDuration="4.377610793s" podCreationTimestamp="2025-12-10 15:31:59 +0000 UTC" firstStartedPulling="2025-12-10 15:32:00.154157556 +0000 UTC m=+694.071104183" lastFinishedPulling="2025-12-10 15:32:02.51751299 +0000 UTC m=+696.434459637" observedRunningTime="2025-12-10 15:32:03.369996171 +0000 UTC m=+697.286942798" watchObservedRunningTime="2025-12-10 15:32:03.377610793 +0000 UTC m=+697.294557420" Dec 10 15:32:03 crc kubenswrapper[4669]: I1210 15:32:03.388389 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-bh52g" podStartSLOduration=2.089280848 podStartE2EDuration="4.388374696s" podCreationTimestamp="2025-12-10 15:31:59 +0000 UTC" firstStartedPulling="2025-12-10 15:32:00.217708643 +0000 UTC m=+694.134655270" 
lastFinishedPulling="2025-12-10 15:32:02.516802491 +0000 UTC m=+696.433749118" observedRunningTime="2025-12-10 15:32:03.386485087 +0000 UTC m=+697.303431714" watchObservedRunningTime="2025-12-10 15:32:03.388374696 +0000 UTC m=+697.305321313" Dec 10 15:32:04 crc kubenswrapper[4669]: I1210 15:32:04.368631 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" event={"ID":"82533b7d-e2b7-46f8-9522-e62da9f8f8dd","Type":"ContainerStarted","Data":"12e301572fe11b765837d0668c77876bd535fde9189fc5e12bea3c94fd3fa824"} Dec 10 15:32:04 crc kubenswrapper[4669]: I1210 15:32:04.390893 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-cr4c8" podStartSLOduration=2.002754049 podStartE2EDuration="5.390877011s" podCreationTimestamp="2025-12-10 15:31:59 +0000 UTC" firstStartedPulling="2025-12-10 15:32:00.258958317 +0000 UTC m=+694.175904944" lastFinishedPulling="2025-12-10 15:32:03.647081279 +0000 UTC m=+697.564027906" observedRunningTime="2025-12-10 15:32:04.384440778 +0000 UTC m=+698.301387415" watchObservedRunningTime="2025-12-10 15:32:04.390877011 +0000 UTC m=+698.307823638" Dec 10 15:32:05 crc kubenswrapper[4669]: I1210 15:32:05.375185 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl" event={"ID":"b9806903-d8e2-4131-9bf1-14d0e1c7597b","Type":"ContainerStarted","Data":"8b07affe3acc81455c3981a4f5ab59fcc4bb682f3b9de5e74adeba0cc9b3b7b6"} Dec 10 15:32:05 crc kubenswrapper[4669]: I1210 15:32:05.392194 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-9vchl" podStartSLOduration=1.643730808 podStartE2EDuration="6.392177916s" podCreationTimestamp="2025-12-10 15:31:59 +0000 UTC" firstStartedPulling="2025-12-10 15:32:00.223361806 +0000 UTC m=+694.140308433" lastFinishedPulling="2025-12-10 15:32:04.971808914 +0000 UTC m=+698.888755541" observedRunningTime="2025-12-10 15:32:05.387867417 +0000 UTC m=+699.304814054" watchObservedRunningTime="2025-12-10 15:32:05.392177916 +0000 UTC m=+699.309124533" Dec 10 15:32:10 crc kubenswrapper[4669]: I1210 15:32:10.199728 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:10 crc kubenswrapper[4669]: I1210 15:32:10.200370 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:10 crc kubenswrapper[4669]: I1210 15:32:10.211129 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:10 crc kubenswrapper[4669]: I1210 15:32:10.215747 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-bh52g" Dec 10 15:32:10 crc kubenswrapper[4669]: I1210 15:32:10.407277 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-785d49f475-nnqp4" Dec 10 15:32:10 crc kubenswrapper[4669]: I1210 15:32:10.523710 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-dsw2s"] Dec 10 15:32:19 crc kubenswrapper[4669]: I1210 15:32:19.816243 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-vl7cd" Dec 10 15:32:31 crc kubenswrapper[4669]: I1210 15:32:31.855711 4669 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4"] Dec 10 15:32:31 crc kubenswrapper[4669]: I1210 15:32:31.857581 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:31 crc kubenswrapper[4669]: I1210 15:32:31.859392 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 10 15:32:31 crc kubenswrapper[4669]: I1210 15:32:31.866354 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4"] Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.015734 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.016041 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxbgd\" (UniqueName: \"kubernetes.io/projected/dcb7775d-549e-4734-9047-1a9ff0cbca3c-kube-api-access-dxbgd\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.016091 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.117675 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.117771 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxbgd\" (UniqueName: \"kubernetes.io/projected/dcb7775d-549e-4734-9047-1a9ff0cbca3c-kube-api-access-dxbgd\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.117795 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " 
pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.118438 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.118473 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.143015 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxbgd\" (UniqueName: \"kubernetes.io/projected/dcb7775d-549e-4734-9047-1a9ff0cbca3c-kube-api-access-dxbgd\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.176145 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:32 crc kubenswrapper[4669]: I1210 15:32:32.650777 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4"] Dec 10 15:32:32 crc kubenswrapper[4669]: W1210 15:32:32.671357 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddcb7775d_549e_4734_9047_1a9ff0cbca3c.slice/crio-28ee17dde73d709baa934a2974b7292ba8cec5f78017b7dbd1138d7199774f5f WatchSource:0}: Error finding container 28ee17dde73d709baa934a2974b7292ba8cec5f78017b7dbd1138d7199774f5f: Status 404 returned error can't find the container with id 28ee17dde73d709baa934a2974b7292ba8cec5f78017b7dbd1138d7199774f5f Dec 10 15:32:33 crc kubenswrapper[4669]: I1210 15:32:33.581445 4669 generic.go:334] "Generic (PLEG): container finished" podID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerID="84d953493922c2994dc0ec88ff4376f96d2bf8530ee0a76a26b18989d57b4df5" exitCode=0 Dec 10 15:32:33 crc kubenswrapper[4669]: I1210 15:32:33.581504 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" event={"ID":"dcb7775d-549e-4734-9047-1a9ff0cbca3c","Type":"ContainerDied","Data":"84d953493922c2994dc0ec88ff4376f96d2bf8530ee0a76a26b18989d57b4df5"} Dec 10 15:32:33 crc kubenswrapper[4669]: I1210 15:32:33.581861 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" event={"ID":"dcb7775d-549e-4734-9047-1a9ff0cbca3c","Type":"ContainerStarted","Data":"28ee17dde73d709baa934a2974b7292ba8cec5f78017b7dbd1138d7199774f5f"} Dec 10 15:32:35 crc kubenswrapper[4669]: I1210 15:32:35.559835 4669 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-console/console-f9d7485db-dsw2s" podUID="7d419e0a-917c-410c-820b-ddfab808a3fe" containerName="console" containerID="cri-o://7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529" gracePeriod=15 Dec 10 15:32:35 crc kubenswrapper[4669]: I1210 15:32:35.955121 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dsw2s_7d419e0a-917c-410c-820b-ddfab808a3fe/console/0.log" Dec 10 15:32:35 crc kubenswrapper[4669]: I1210 15:32:35.955207 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.077194 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-trusted-ca-bundle\") pod \"7d419e0a-917c-410c-820b-ddfab808a3fe\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.078291 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "7d419e0a-917c-410c-820b-ddfab808a3fe" (UID: "7d419e0a-917c-410c-820b-ddfab808a3fe"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.077417 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-serving-cert\") pod \"7d419e0a-917c-410c-820b-ddfab808a3fe\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.078565 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-oauth-config\") pod \"7d419e0a-917c-410c-820b-ddfab808a3fe\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.079031 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-console-config\") pod \"7d419e0a-917c-410c-820b-ddfab808a3fe\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.079085 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-service-ca\") pod \"7d419e0a-917c-410c-820b-ddfab808a3fe\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.079169 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvhcz\" (UniqueName: \"kubernetes.io/projected/7d419e0a-917c-410c-820b-ddfab808a3fe-kube-api-access-vvhcz\") pod \"7d419e0a-917c-410c-820b-ddfab808a3fe\" (UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.079207 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-oauth-serving-cert\") pod \"7d419e0a-917c-410c-820b-ddfab808a3fe\" 
(UID: \"7d419e0a-917c-410c-820b-ddfab808a3fe\") " Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.079812 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-console-config" (OuterVolumeSpecName: "console-config") pod "7d419e0a-917c-410c-820b-ddfab808a3fe" (UID: "7d419e0a-917c-410c-820b-ddfab808a3fe"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.079882 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-service-ca" (OuterVolumeSpecName: "service-ca") pod "7d419e0a-917c-410c-820b-ddfab808a3fe" (UID: "7d419e0a-917c-410c-820b-ddfab808a3fe"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.080294 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "7d419e0a-917c-410c-820b-ddfab808a3fe" (UID: "7d419e0a-917c-410c-820b-ddfab808a3fe"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.080501 4669 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.080526 4669 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.080540 4669 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-console-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.080554 4669 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7d419e0a-917c-410c-820b-ddfab808a3fe-service-ca\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.084176 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "7d419e0a-917c-410c-820b-ddfab808a3fe" (UID: "7d419e0a-917c-410c-820b-ddfab808a3fe"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.087581 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "7d419e0a-917c-410c-820b-ddfab808a3fe" (UID: "7d419e0a-917c-410c-820b-ddfab808a3fe"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.087657 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d419e0a-917c-410c-820b-ddfab808a3fe-kube-api-access-vvhcz" (OuterVolumeSpecName: "kube-api-access-vvhcz") pod "7d419e0a-917c-410c-820b-ddfab808a3fe" (UID: "7d419e0a-917c-410c-820b-ddfab808a3fe"). InnerVolumeSpecName "kube-api-access-vvhcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.182172 4669 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.182645 4669 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7d419e0a-917c-410c-820b-ddfab808a3fe-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.182666 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvhcz\" (UniqueName: \"kubernetes.io/projected/7d419e0a-917c-410c-820b-ddfab808a3fe-kube-api-access-vvhcz\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.598746 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dsw2s_7d419e0a-917c-410c-820b-ddfab808a3fe/console/0.log" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.598785 4669 generic.go:334] "Generic (PLEG): container finished" podID="7d419e0a-917c-410c-820b-ddfab808a3fe" containerID="7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529" exitCode=2 Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.598832 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dsw2s" event={"ID":"7d419e0a-917c-410c-820b-ddfab808a3fe","Type":"ContainerDied","Data":"7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529"} Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.598860 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dsw2s" event={"ID":"7d419e0a-917c-410c-820b-ddfab808a3fe","Type":"ContainerDied","Data":"e58d099669588d23862cedf987de159f726897f5c67d5cb082aaa2fad933b1b9"} Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.598875 4669 scope.go:117] "RemoveContainer" containerID="7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.598960 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-dsw2s" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.601299 4669 generic.go:334] "Generic (PLEG): container finished" podID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerID="16a6de29e2de4aacba6be3a585a0056444cd764a8f6c3c9f05b323e002783280" exitCode=0 Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.601323 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" event={"ID":"dcb7775d-549e-4734-9047-1a9ff0cbca3c","Type":"ContainerDied","Data":"16a6de29e2de4aacba6be3a585a0056444cd764a8f6c3c9f05b323e002783280"} Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.661897 4669 scope.go:117] "RemoveContainer" containerID="7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.670121 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-dsw2s"] Dec 10 15:32:36 crc kubenswrapper[4669]: E1210 15:32:36.672436 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529\": container with ID starting with 7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529 not found: ID does not exist" containerID="7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.672486 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529"} err="failed to get container status \"7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529\": rpc error: code = NotFound desc = could not find container \"7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529\": container with ID starting with 7db6beeb48b25331ef6c01b4e003270a76d693d81efff3f07e305918323a9529 not found: ID does not exist" Dec 10 15:32:36 crc kubenswrapper[4669]: I1210 15:32:36.674059 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-dsw2s"] Dec 10 15:32:37 crc kubenswrapper[4669]: I1210 15:32:37.616021 4669 generic.go:334] "Generic (PLEG): container finished" podID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerID="a7218a1112e04630bacb540b4b2c7fafb8579d11907e940b92252df4c13a9ba8" exitCode=0 Dec 10 15:32:37 crc kubenswrapper[4669]: I1210 15:32:37.616111 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" event={"ID":"dcb7775d-549e-4734-9047-1a9ff0cbca3c","Type":"ContainerDied","Data":"a7218a1112e04630bacb540b4b2c7fafb8579d11907e940b92252df4c13a9ba8"} Dec 10 15:32:38 crc kubenswrapper[4669]: I1210 15:32:38.413766 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d419e0a-917c-410c-820b-ddfab808a3fe" path="/var/lib/kubelet/pods/7d419e0a-917c-410c-820b-ddfab808a3fe/volumes" Dec 10 15:32:38 crc kubenswrapper[4669]: I1210 15:32:38.921569 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.119684 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dxbgd\" (UniqueName: \"kubernetes.io/projected/dcb7775d-549e-4734-9047-1a9ff0cbca3c-kube-api-access-dxbgd\") pod \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.119912 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-util\") pod \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.119974 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-bundle\") pod \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\" (UID: \"dcb7775d-549e-4734-9047-1a9ff0cbca3c\") " Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.121309 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-bundle" (OuterVolumeSpecName: "bundle") pod "dcb7775d-549e-4734-9047-1a9ff0cbca3c" (UID: "dcb7775d-549e-4734-9047-1a9ff0cbca3c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.125871 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dcb7775d-549e-4734-9047-1a9ff0cbca3c-kube-api-access-dxbgd" (OuterVolumeSpecName: "kube-api-access-dxbgd") pod "dcb7775d-549e-4734-9047-1a9ff0cbca3c" (UID: "dcb7775d-549e-4734-9047-1a9ff0cbca3c"). InnerVolumeSpecName "kube-api-access-dxbgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.133811 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-util" (OuterVolumeSpecName: "util") pod "dcb7775d-549e-4734-9047-1a9ff0cbca3c" (UID: "dcb7775d-549e-4734-9047-1a9ff0cbca3c"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.221988 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dxbgd\" (UniqueName: \"kubernetes.io/projected/dcb7775d-549e-4734-9047-1a9ff0cbca3c-kube-api-access-dxbgd\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.222049 4669 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-util\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.222078 4669 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/dcb7775d-549e-4734-9047-1a9ff0cbca3c-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.635415 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" event={"ID":"dcb7775d-549e-4734-9047-1a9ff0cbca3c","Type":"ContainerDied","Data":"28ee17dde73d709baa934a2974b7292ba8cec5f78017b7dbd1138d7199774f5f"} Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.635678 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28ee17dde73d709baa934a2974b7292ba8cec5f78017b7dbd1138d7199774f5f" Dec 10 15:32:39 crc kubenswrapper[4669]: I1210 15:32:39.635493 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.043506 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx"] Dec 10 15:32:50 crc kubenswrapper[4669]: E1210 15:32:50.044342 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerName="extract" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.044360 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerName="extract" Dec 10 15:32:50 crc kubenswrapper[4669]: E1210 15:32:50.044375 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d419e0a-917c-410c-820b-ddfab808a3fe" containerName="console" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.044382 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d419e0a-917c-410c-820b-ddfab808a3fe" containerName="console" Dec 10 15:32:50 crc kubenswrapper[4669]: E1210 15:32:50.044396 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerName="util" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.044406 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerName="util" Dec 10 15:32:50 crc kubenswrapper[4669]: E1210 15:32:50.044422 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerName="pull" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.044430 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerName="pull" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.044586 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="dcb7775d-549e-4734-9047-1a9ff0cbca3c" containerName="extract" Dec 
10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.044602 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d419e0a-917c-410c-820b-ddfab808a3fe" containerName="console" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.044949 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.046956 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.048235 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.050800 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.052817 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-bn49x" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.057088 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.073876 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx"] Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.159352 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/46153871-4c53-450c-a8ac-4cb540652173-webhook-cert\") pod \"metallb-operator-controller-manager-7b4fb5f468-2kmvx\" (UID: \"46153871-4c53-450c-a8ac-4cb540652173\") " pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.159401 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2kgq\" (UniqueName: \"kubernetes.io/projected/46153871-4c53-450c-a8ac-4cb540652173-kube-api-access-p2kgq\") pod \"metallb-operator-controller-manager-7b4fb5f468-2kmvx\" (UID: \"46153871-4c53-450c-a8ac-4cb540652173\") " pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.159445 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/46153871-4c53-450c-a8ac-4cb540652173-apiservice-cert\") pod \"metallb-operator-controller-manager-7b4fb5f468-2kmvx\" (UID: \"46153871-4c53-450c-a8ac-4cb540652173\") " pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.261084 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/46153871-4c53-450c-a8ac-4cb540652173-webhook-cert\") pod \"metallb-operator-controller-manager-7b4fb5f468-2kmvx\" (UID: \"46153871-4c53-450c-a8ac-4cb540652173\") " pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.261134 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2kgq\" (UniqueName: 
\"kubernetes.io/projected/46153871-4c53-450c-a8ac-4cb540652173-kube-api-access-p2kgq\") pod \"metallb-operator-controller-manager-7b4fb5f468-2kmvx\" (UID: \"46153871-4c53-450c-a8ac-4cb540652173\") " pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.261176 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/46153871-4c53-450c-a8ac-4cb540652173-apiservice-cert\") pod \"metallb-operator-controller-manager-7b4fb5f468-2kmvx\" (UID: \"46153871-4c53-450c-a8ac-4cb540652173\") " pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.267540 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/46153871-4c53-450c-a8ac-4cb540652173-webhook-cert\") pod \"metallb-operator-controller-manager-7b4fb5f468-2kmvx\" (UID: \"46153871-4c53-450c-a8ac-4cb540652173\") " pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.268050 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/46153871-4c53-450c-a8ac-4cb540652173-apiservice-cert\") pod \"metallb-operator-controller-manager-7b4fb5f468-2kmvx\" (UID: \"46153871-4c53-450c-a8ac-4cb540652173\") " pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.284936 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2kgq\" (UniqueName: \"kubernetes.io/projected/46153871-4c53-450c-a8ac-4cb540652173-kube-api-access-p2kgq\") pod \"metallb-operator-controller-manager-7b4fb5f468-2kmvx\" (UID: \"46153871-4c53-450c-a8ac-4cb540652173\") " pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.339591 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg"] Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.341204 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.346062 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.347245 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-2w96m" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.347516 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.361038 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.372547 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg"] Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.462816 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/943aba10-b13d-4ba0-900b-66cdc776b921-webhook-cert\") pod \"metallb-operator-webhook-server-7cf4974685-slvbg\" (UID: \"943aba10-b13d-4ba0-900b-66cdc776b921\") " pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.462866 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9j4lw\" (UniqueName: \"kubernetes.io/projected/943aba10-b13d-4ba0-900b-66cdc776b921-kube-api-access-9j4lw\") pod \"metallb-operator-webhook-server-7cf4974685-slvbg\" (UID: \"943aba10-b13d-4ba0-900b-66cdc776b921\") " pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.462902 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/943aba10-b13d-4ba0-900b-66cdc776b921-apiservice-cert\") pod \"metallb-operator-webhook-server-7cf4974685-slvbg\" (UID: \"943aba10-b13d-4ba0-900b-66cdc776b921\") " pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.564291 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/943aba10-b13d-4ba0-900b-66cdc776b921-webhook-cert\") pod \"metallb-operator-webhook-server-7cf4974685-slvbg\" (UID: \"943aba10-b13d-4ba0-900b-66cdc776b921\") " pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.564343 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9j4lw\" (UniqueName: \"kubernetes.io/projected/943aba10-b13d-4ba0-900b-66cdc776b921-kube-api-access-9j4lw\") pod \"metallb-operator-webhook-server-7cf4974685-slvbg\" (UID: \"943aba10-b13d-4ba0-900b-66cdc776b921\") " pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.564403 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/943aba10-b13d-4ba0-900b-66cdc776b921-apiservice-cert\") pod \"metallb-operator-webhook-server-7cf4974685-slvbg\" (UID: \"943aba10-b13d-4ba0-900b-66cdc776b921\") " pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.568890 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/943aba10-b13d-4ba0-900b-66cdc776b921-apiservice-cert\") pod \"metallb-operator-webhook-server-7cf4974685-slvbg\" (UID: \"943aba10-b13d-4ba0-900b-66cdc776b921\") " pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.578022 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/943aba10-b13d-4ba0-900b-66cdc776b921-webhook-cert\") pod \"metallb-operator-webhook-server-7cf4974685-slvbg\" (UID: \"943aba10-b13d-4ba0-900b-66cdc776b921\") " pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.601022 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9j4lw\" (UniqueName: \"kubernetes.io/projected/943aba10-b13d-4ba0-900b-66cdc776b921-kube-api-access-9j4lw\") pod \"metallb-operator-webhook-server-7cf4974685-slvbg\" (UID: \"943aba10-b13d-4ba0-900b-66cdc776b921\") " pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.659479 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:50 crc kubenswrapper[4669]: I1210 15:32:50.708893 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx"] Dec 10 15:32:50 crc kubenswrapper[4669]: W1210 15:32:50.743784 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46153871_4c53_450c_a8ac_4cb540652173.slice/crio-f4dbeb1e65de5660df821b47adc28a1e9981fab63a0c68385bab4bd1ae070510 WatchSource:0}: Error finding container f4dbeb1e65de5660df821b47adc28a1e9981fab63a0c68385bab4bd1ae070510: Status 404 returned error can't find the container with id f4dbeb1e65de5660df821b47adc28a1e9981fab63a0c68385bab4bd1ae070510 Dec 10 15:32:51 crc kubenswrapper[4669]: I1210 15:32:51.016886 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg"] Dec 10 15:32:51 crc kubenswrapper[4669]: W1210 15:32:51.024035 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod943aba10_b13d_4ba0_900b_66cdc776b921.slice/crio-ff6985d4df1f3829c2dd27e0acbb5c8174c69331a777244b7c8c0bf175f91020 WatchSource:0}: Error finding container ff6985d4df1f3829c2dd27e0acbb5c8174c69331a777244b7c8c0bf175f91020: Status 404 returned error can't find the container with id ff6985d4df1f3829c2dd27e0acbb5c8174c69331a777244b7c8c0bf175f91020 Dec 10 15:32:51 crc kubenswrapper[4669]: I1210 15:32:51.724874 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" event={"ID":"943aba10-b13d-4ba0-900b-66cdc776b921","Type":"ContainerStarted","Data":"ff6985d4df1f3829c2dd27e0acbb5c8174c69331a777244b7c8c0bf175f91020"} Dec 10 15:32:51 crc kubenswrapper[4669]: I1210 15:32:51.726018 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" event={"ID":"46153871-4c53-450c-a8ac-4cb540652173","Type":"ContainerStarted","Data":"f4dbeb1e65de5660df821b47adc28a1e9981fab63a0c68385bab4bd1ae070510"} Dec 10 15:32:57 crc kubenswrapper[4669]: I1210 15:32:57.763091 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" event={"ID":"46153871-4c53-450c-a8ac-4cb540652173","Type":"ContainerStarted","Data":"737e8e9a9c7a33e650a9a459bda97a91c83b70c1fd6958f9c217b177d2b661f2"} Dec 10 15:32:57 crc kubenswrapper[4669]: I1210 15:32:57.763687 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:32:57 crc kubenswrapper[4669]: I1210 15:32:57.765659 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" event={"ID":"943aba10-b13d-4ba0-900b-66cdc776b921","Type":"ContainerStarted","Data":"201925f67efd639777288717f937608f0ee54866024a5c8c9e5f2231664fb4a5"} Dec 10 15:32:57 crc kubenswrapper[4669]: I1210 15:32:57.765796 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:32:57 crc kubenswrapper[4669]: I1210 15:32:57.813699 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" podStartSLOduration=1.824900547 podStartE2EDuration="7.813680112s" podCreationTimestamp="2025-12-10 15:32:50 +0000 UTC" firstStartedPulling="2025-12-10 15:32:50.753067396 +0000 UTC m=+744.670014023" lastFinishedPulling="2025-12-10 15:32:56.741846961 +0000 UTC m=+750.658793588" observedRunningTime="2025-12-10 15:32:57.791940539 +0000 UTC m=+751.708887186" watchObservedRunningTime="2025-12-10 15:32:57.813680112 +0000 UTC m=+751.730626739" Dec 10 15:32:57 crc kubenswrapper[4669]: I1210 15:32:57.816059 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" podStartSLOduration=2.077173095 podStartE2EDuration="7.816047184s" podCreationTimestamp="2025-12-10 15:32:50 +0000 UTC" firstStartedPulling="2025-12-10 15:32:51.027034115 +0000 UTC m=+744.943980742" lastFinishedPulling="2025-12-10 15:32:56.765908204 +0000 UTC m=+750.682854831" observedRunningTime="2025-12-10 15:32:57.811662661 +0000 UTC m=+751.728609298" watchObservedRunningTime="2025-12-10 15:32:57.816047184 +0000 UTC m=+751.732993811" Dec 10 15:32:58 crc kubenswrapper[4669]: I1210 15:32:58.745655 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:32:58 crc kubenswrapper[4669]: I1210 15:32:58.745715 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:33:08 crc kubenswrapper[4669]: I1210 15:33:08.292893 4669 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 10 15:33:10 crc kubenswrapper[4669]: I1210 15:33:10.664603 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7cf4974685-slvbg" Dec 10 15:33:28 crc kubenswrapper[4669]: I1210 15:33:28.745140 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:33:28 crc kubenswrapper[4669]: I1210 15:33:28.745905 4669 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:33:30 crc kubenswrapper[4669]: I1210 15:33:30.364507 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-7b4fb5f468-2kmvx" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.148952 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72"] Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.149650 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.155148 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.157037 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-kkl4x" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.161765 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-8nqwr"] Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.164024 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.167707 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72"] Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.168136 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.168157 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.260174 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-nvh5p"] Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.261450 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-nvh5p" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.262996 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-7k4dr" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.263463 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.263593 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.263954 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.279770 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-5cxqp"] Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.280784 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-5cxqp" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.283027 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.305278 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-5cxqp"] Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.340240 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7255015-ac33-4ed6-8b27-432ef76cd293-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-nht72\" (UID: \"e7255015-ac33-4ed6-8b27-432ef76cd293\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.340280 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-metrics-certs\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.340305 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-frr-conf\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.340327 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvwss\" (UniqueName: \"kubernetes.io/projected/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-kube-api-access-rvwss\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.340348 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q95xq\" (UniqueName: \"kubernetes.io/projected/e7255015-ac33-4ed6-8b27-432ef76cd293-kube-api-access-q95xq\") pod \"frr-k8s-webhook-server-7fcb986d4-nht72\" (UID: \"e7255015-ac33-4ed6-8b27-432ef76cd293\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.340373 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-frr-startup\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.340411 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-frr-sockets\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.340432 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-metrics\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:31 
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.340450 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-reloader\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442055 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-frr-sockets\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442379 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9t49\" (UniqueName: \"kubernetes.io/projected/7e1bd3e9-c78c-4b8b-84c1-b37502c9c927-kube-api-access-q9t49\") pod \"controller-f8648f98b-5cxqp\" (UID: \"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927\") " pod="metallb-system/controller-f8648f98b-5cxqp"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442431 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-metrics-certs\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442465 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-metrics\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442480 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7e1bd3e9-c78c-4b8b-84c1-b37502c9c927-cert\") pod \"controller-f8648f98b-5cxqp\" (UID: \"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927\") " pod="metallb-system/controller-f8648f98b-5cxqp"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442504 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-reloader\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442543 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs6r8\" (UniqueName: \"kubernetes.io/projected/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-kube-api-access-qs6r8\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442560 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-metallb-excludel2\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442579 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7255015-ac33-4ed6-8b27-432ef76cd293-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-nht72\" (UID: \"e7255015-ac33-4ed6-8b27-432ef76cd293\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442597 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-metrics-certs\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442647 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-frr-conf\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442645 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-frr-sockets\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442665 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvwss\" (UniqueName: \"kubernetes.io/projected/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-kube-api-access-rvwss\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442716 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q95xq\" (UniqueName: \"kubernetes.io/projected/e7255015-ac33-4ed6-8b27-432ef76cd293-kube-api-access-q95xq\") pod \"frr-k8s-webhook-server-7fcb986d4-nht72\" (UID: \"e7255015-ac33-4ed6-8b27-432ef76cd293\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442765 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7e1bd3e9-c78c-4b8b-84c1-b37502c9c927-metrics-certs\") pod \"controller-f8648f98b-5cxqp\" (UID: \"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927\") " pod="metallb-system/controller-f8648f98b-5cxqp"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442782 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-memberlist\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.442832 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-frr-startup\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.443451 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-metrics\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.443660 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-reloader\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.444013 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-frr-conf\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.444166 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-frr-startup\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.448170 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-metrics-certs\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.464306 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e7255015-ac33-4ed6-8b27-432ef76cd293-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-nht72\" (UID: \"e7255015-ac33-4ed6-8b27-432ef76cd293\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.478301 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q95xq\" (UniqueName: \"kubernetes.io/projected/e7255015-ac33-4ed6-8b27-432ef76cd293-kube-api-access-q95xq\") pod \"frr-k8s-webhook-server-7fcb986d4-nht72\" (UID: \"e7255015-ac33-4ed6-8b27-432ef76cd293\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.514897 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvwss\" (UniqueName: \"kubernetes.io/projected/e98ae1e5-2f3e-4204-bd7a-e7e00438c186-kube-api-access-rvwss\") pod \"frr-k8s-8nqwr\" (UID: \"e98ae1e5-2f3e-4204-bd7a-e7e00438c186\") " pod="metallb-system/frr-k8s-8nqwr"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.543500 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-metrics-certs\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p"
Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.543981 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7e1bd3e9-c78c-4b8b-84c1-b37502c9c927-cert\") pod \"controller-f8648f98b-5cxqp\" (UID: \"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927\") " pod="metallb-system/controller-f8648f98b-5cxqp"
\"kubernetes.io/projected/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-kube-api-access-qs6r8\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.544027 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-metallb-excludel2\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.544066 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7e1bd3e9-c78c-4b8b-84c1-b37502c9c927-metrics-certs\") pod \"controller-f8648f98b-5cxqp\" (UID: \"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927\") " pod="metallb-system/controller-f8648f98b-5cxqp" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.544081 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-memberlist\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.544163 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9t49\" (UniqueName: \"kubernetes.io/projected/7e1bd3e9-c78c-4b8b-84c1-b37502c9c927-kube-api-access-q9t49\") pod \"controller-f8648f98b-5cxqp\" (UID: \"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927\") " pod="metallb-system/controller-f8648f98b-5cxqp" Dec 10 15:33:31 crc kubenswrapper[4669]: E1210 15:33:31.544339 4669 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 10 15:33:31 crc kubenswrapper[4669]: E1210 15:33:31.544424 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-memberlist podName:963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5 nodeName:}" failed. No retries permitted until 2025-12-10 15:33:32.044403879 +0000 UTC m=+785.961350616 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-memberlist") pod "speaker-nvh5p" (UID: "963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5") : secret "metallb-memberlist" not found Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.544886 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-metallb-excludel2\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.548590 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-metrics-certs\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.550659 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7e1bd3e9-c78c-4b8b-84c1-b37502c9c927-metrics-certs\") pod \"controller-f8648f98b-5cxqp\" (UID: \"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927\") " pod="metallb-system/controller-f8648f98b-5cxqp" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.551865 4669 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.557532 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/7e1bd3e9-c78c-4b8b-84c1-b37502c9c927-cert\") pod \"controller-f8648f98b-5cxqp\" (UID: \"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927\") " pod="metallb-system/controller-f8648f98b-5cxqp" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.562734 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs6r8\" (UniqueName: \"kubernetes.io/projected/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-kube-api-access-qs6r8\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.570303 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9t49\" (UniqueName: \"kubernetes.io/projected/7e1bd3e9-c78c-4b8b-84c1-b37502c9c927-kube-api-access-q9t49\") pod \"controller-f8648f98b-5cxqp\" (UID: \"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927\") " pod="metallb-system/controller-f8648f98b-5cxqp" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.593668 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-5cxqp" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.768717 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72" Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.779882 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-5cxqp"] Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.782333 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:31 crc kubenswrapper[4669]: W1210 15:33:31.791267 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7e1bd3e9_c78c_4b8b_84c1_b37502c9c927.slice/crio-d4f7c18a3dd4cef00dabd53bd63b6f4630070a77483f07afb3c82e3c723ac40f WatchSource:0}: Error finding container d4f7c18a3dd4cef00dabd53bd63b6f4630070a77483f07afb3c82e3c723ac40f: Status 404 returned error can't find the container with id d4f7c18a3dd4cef00dabd53bd63b6f4630070a77483f07afb3c82e3c723ac40f Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.961454 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-5cxqp" event={"ID":"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927","Type":"ContainerStarted","Data":"965743c9c5c251607ecf515a6d10a083554eab2023e10aa5ea054f4900c63b4d"} Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.961490 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-5cxqp" event={"ID":"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927","Type":"ContainerStarted","Data":"d4f7c18a3dd4cef00dabd53bd63b6f4630070a77483f07afb3c82e3c723ac40f"} Dec 10 15:33:31 crc kubenswrapper[4669]: I1210 15:33:31.963729 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerStarted","Data":"4f88d8d6b2b6021529324847c564f6485aa52986b14168a6ee45ef62f331f397"} Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.050827 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-memberlist\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p" Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.056273 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5-memberlist\") pod \"speaker-nvh5p\" (UID: \"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5\") " pod="metallb-system/speaker-nvh5p" Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.178239 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-nvh5p" Dec 10 15:33:32 crc kubenswrapper[4669]: W1210 15:33:32.202476 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod963543ef_6eb5_4ccb_9b8b_a049dfe8b4e5.slice/crio-d36a24be667bd7637aa83caa32d3e01e811d8e366c804434ab51f5e8ac01077d WatchSource:0}: Error finding container d36a24be667bd7637aa83caa32d3e01e811d8e366c804434ab51f5e8ac01077d: Status 404 returned error can't find the container with id d36a24be667bd7637aa83caa32d3e01e811d8e366c804434ab51f5e8ac01077d Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.301759 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72"] Dec 10 15:33:32 crc kubenswrapper[4669]: W1210 15:33:32.311473 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode7255015_ac33_4ed6_8b27_432ef76cd293.slice/crio-b1d4ea73986c448f57fd18f7c83d50431cce96ba1d13d9f6660f714d8f6ba2b9 WatchSource:0}: Error finding container b1d4ea73986c448f57fd18f7c83d50431cce96ba1d13d9f6660f714d8f6ba2b9: Status 404 returned error can't find the container with id b1d4ea73986c448f57fd18f7c83d50431cce96ba1d13d9f6660f714d8f6ba2b9 Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.976302 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-5cxqp" event={"ID":"7e1bd3e9-c78c-4b8b-84c1-b37502c9c927","Type":"ContainerStarted","Data":"5a4d1eea8292b72541d62d2cfeaf83fbba77d20032b91ed3c6a8c3dac5da8c7c"} Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.976721 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-5cxqp" Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.979363 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72" event={"ID":"e7255015-ac33-4ed6-8b27-432ef76cd293","Type":"ContainerStarted","Data":"b1d4ea73986c448f57fd18f7c83d50431cce96ba1d13d9f6660f714d8f6ba2b9"} Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.981615 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nvh5p" event={"ID":"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5","Type":"ContainerStarted","Data":"8e0d11f599ad7cc6272baba5a238c43a43bfbdd9a881d63c7dfcd8985b9b7264"} Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.981647 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nvh5p" event={"ID":"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5","Type":"ContainerStarted","Data":"b4c4ffdfb0ce67f5bb89d89d7024799ed5abb54434f590c78f118f6f647d2208"} Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.981661 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nvh5p" event={"ID":"963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5","Type":"ContainerStarted","Data":"d36a24be667bd7637aa83caa32d3e01e811d8e366c804434ab51f5e8ac01077d"} Dec 10 15:33:32 crc kubenswrapper[4669]: I1210 15:33:32.982377 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-nvh5p" Dec 10 15:33:33 crc kubenswrapper[4669]: I1210 15:33:33.008136 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-5cxqp" podStartSLOduration=2.008115926 podStartE2EDuration="2.008115926s" podCreationTimestamp="2025-12-10 15:33:31 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:33:32.999240844 +0000 UTC m=+786.916187471" watchObservedRunningTime="2025-12-10 15:33:33.008115926 +0000 UTC m=+786.925062573" Dec 10 15:33:33 crc kubenswrapper[4669]: I1210 15:33:33.031291 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-nvh5p" podStartSLOduration=2.031272621 podStartE2EDuration="2.031272621s" podCreationTimestamp="2025-12-10 15:33:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:33:33.029431534 +0000 UTC m=+786.946378171" watchObservedRunningTime="2025-12-10 15:33:33.031272621 +0000 UTC m=+786.948219258" Dec 10 15:33:40 crc kubenswrapper[4669]: I1210 15:33:40.036742 4669 generic.go:334] "Generic (PLEG): container finished" podID="e98ae1e5-2f3e-4204-bd7a-e7e00438c186" containerID="0716fcfc39f56f2d60d27cbc8ca92b4e796ae1d1c60c95dbce6f2b4738805c4c" exitCode=0 Dec 10 15:33:40 crc kubenswrapper[4669]: I1210 15:33:40.036798 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerDied","Data":"0716fcfc39f56f2d60d27cbc8ca92b4e796ae1d1c60c95dbce6f2b4738805c4c"} Dec 10 15:33:40 crc kubenswrapper[4669]: I1210 15:33:40.038763 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72" event={"ID":"e7255015-ac33-4ed6-8b27-432ef76cd293","Type":"ContainerStarted","Data":"c0eef43f6c70fdba122e101b423292941016f13e0cbce71209b84d4b4fa48dd4"} Dec 10 15:33:40 crc kubenswrapper[4669]: I1210 15:33:40.038989 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72" Dec 10 15:33:40 crc kubenswrapper[4669]: I1210 15:33:40.097508 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72" podStartSLOduration=1.9902877110000001 podStartE2EDuration="9.097486346s" podCreationTimestamp="2025-12-10 15:33:31 +0000 UTC" firstStartedPulling="2025-12-10 15:33:32.313686325 +0000 UTC m=+786.230632952" lastFinishedPulling="2025-12-10 15:33:39.42088495 +0000 UTC m=+793.337831587" observedRunningTime="2025-12-10 15:33:40.09268255 +0000 UTC m=+794.009629207" watchObservedRunningTime="2025-12-10 15:33:40.097486346 +0000 UTC m=+794.014433003" Dec 10 15:33:41 crc kubenswrapper[4669]: I1210 15:33:41.046457 4669 generic.go:334] "Generic (PLEG): container finished" podID="e98ae1e5-2f3e-4204-bd7a-e7e00438c186" containerID="59b66b6b363586f15e2c4cf71217e496a8d40d6ea225b75fcb3cde8d9fba4a00" exitCode=0 Dec 10 15:33:41 crc kubenswrapper[4669]: I1210 15:33:41.046582 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerDied","Data":"59b66b6b363586f15e2c4cf71217e496a8d40d6ea225b75fcb3cde8d9fba4a00"} Dec 10 15:33:41 crc kubenswrapper[4669]: I1210 15:33:41.601839 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-5cxqp" Dec 10 15:33:42 crc kubenswrapper[4669]: I1210 15:33:42.053581 4669 generic.go:334] "Generic (PLEG): container finished" podID="e98ae1e5-2f3e-4204-bd7a-e7e00438c186" containerID="310b46b03cb7cb61799b9174a4cba049cbd117654ac1edce3e170c4e24dbf414" exitCode=0 Dec 10 
15:33:42 crc kubenswrapper[4669]: I1210 15:33:42.053642 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerDied","Data":"310b46b03cb7cb61799b9174a4cba049cbd117654ac1edce3e170c4e24dbf414"} Dec 10 15:33:42 crc kubenswrapper[4669]: I1210 15:33:42.182725 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-nvh5p" Dec 10 15:33:43 crc kubenswrapper[4669]: I1210 15:33:43.066239 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerStarted","Data":"e08f944fbb22342b906e4a65e5679c688c04ff1b157fd88345cd86dfb6b927e6"} Dec 10 15:33:43 crc kubenswrapper[4669]: I1210 15:33:43.066546 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerStarted","Data":"2c36523e0d51a720c1d530d49b39da71bd16d327466ceb388e1cd713363258c1"} Dec 10 15:33:43 crc kubenswrapper[4669]: I1210 15:33:43.066563 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerStarted","Data":"53cbd8b72a132fd9ecbd826af8575e456f5790806ae52d5a7e2d616ad62ea47d"} Dec 10 15:33:43 crc kubenswrapper[4669]: I1210 15:33:43.066579 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerStarted","Data":"cbee8f76e1ecfb70b1ebb6a26eed2e62df16d7aa492d6f3d54f3bfb61b820afe"} Dec 10 15:33:43 crc kubenswrapper[4669]: I1210 15:33:43.066595 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerStarted","Data":"c68feb240400e265485ad2919a939e157b0f46ec3327d76321ea5b2b2d7ae123"} Dec 10 15:33:43 crc kubenswrapper[4669]: I1210 15:33:43.067988 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:43 crc kubenswrapper[4669]: I1210 15:33:43.068026 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-8nqwr" event={"ID":"e98ae1e5-2f3e-4204-bd7a-e7e00438c186","Type":"ContainerStarted","Data":"3f15504c8d4c27b5c3c80396c1f00fa4ec67c8faaa627a457ced5101952ffd25"} Dec 10 15:33:43 crc kubenswrapper[4669]: I1210 15:33:43.099733 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-8nqwr" podStartSLOduration=4.5829634299999995 podStartE2EDuration="12.099707884s" podCreationTimestamp="2025-12-10 15:33:31 +0000 UTC" firstStartedPulling="2025-12-10 15:33:31.926737477 +0000 UTC m=+785.843684104" lastFinishedPulling="2025-12-10 15:33:39.443481901 +0000 UTC m=+793.360428558" observedRunningTime="2025-12-10 15:33:43.093876422 +0000 UTC m=+797.010823059" watchObservedRunningTime="2025-12-10 15:33:43.099707884 +0000 UTC m=+797.016654551" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.214397 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-8l949"] Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.215547 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-8l949" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.220289 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.220570 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-bwk7q" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.220750 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.227625 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8l949"] Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.280691 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rkk7\" (UniqueName: \"kubernetes.io/projected/e1ec0727-710d-45cf-85ef-5a634d710c4b-kube-api-access-6rkk7\") pod \"openstack-operator-index-8l949\" (UID: \"e1ec0727-710d-45cf-85ef-5a634d710c4b\") " pod="openstack-operators/openstack-operator-index-8l949" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.381641 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rkk7\" (UniqueName: \"kubernetes.io/projected/e1ec0727-710d-45cf-85ef-5a634d710c4b-kube-api-access-6rkk7\") pod \"openstack-operator-index-8l949\" (UID: \"e1ec0727-710d-45cf-85ef-5a634d710c4b\") " pod="openstack-operators/openstack-operator-index-8l949" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.404291 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rkk7\" (UniqueName: \"kubernetes.io/projected/e1ec0727-710d-45cf-85ef-5a634d710c4b-kube-api-access-6rkk7\") pod \"openstack-operator-index-8l949\" (UID: \"e1ec0727-710d-45cf-85ef-5a634d710c4b\") " pod="openstack-operators/openstack-operator-index-8l949" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.535974 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-8l949" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.783180 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:46 crc kubenswrapper[4669]: I1210 15:33:46.842723 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:33:47 crc kubenswrapper[4669]: I1210 15:33:47.086464 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-8l949"] Dec 10 15:33:48 crc kubenswrapper[4669]: I1210 15:33:48.099146 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8l949" event={"ID":"e1ec0727-710d-45cf-85ef-5a634d710c4b","Type":"ContainerStarted","Data":"60e02312eb8466767f7245a6ed55de8a0118126663518b7cbd81ecd4f179ff4a"} Dec 10 15:33:49 crc kubenswrapper[4669]: I1210 15:33:49.587711 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-8l949"] Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.113797 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8l949" event={"ID":"e1ec0727-710d-45cf-85ef-5a634d710c4b","Type":"ContainerStarted","Data":"e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637"} Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.113910 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-8l949" podUID="e1ec0727-710d-45cf-85ef-5a634d710c4b" containerName="registry-server" containerID="cri-o://e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637" gracePeriod=2 Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.131351 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-8l949" podStartSLOduration=1.408269328 podStartE2EDuration="4.131329324s" podCreationTimestamp="2025-12-10 15:33:46 +0000 UTC" firstStartedPulling="2025-12-10 15:33:47.095843317 +0000 UTC m=+801.012789944" lastFinishedPulling="2025-12-10 15:33:49.818903303 +0000 UTC m=+803.735849940" observedRunningTime="2025-12-10 15:33:50.130866032 +0000 UTC m=+804.047812669" watchObservedRunningTime="2025-12-10 15:33:50.131329324 +0000 UTC m=+804.048275951" Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.257307 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-b7cfc"] Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.258148 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-b7cfc" Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.263541 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-b7cfc"] Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.457161 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmc6c\" (UniqueName: \"kubernetes.io/projected/b25ef981-69d0-4a4e-bbb8-62f7a32cefdb-kube-api-access-gmc6c\") pod \"openstack-operator-index-b7cfc\" (UID: \"b25ef981-69d0-4a4e-bbb8-62f7a32cefdb\") " pod="openstack-operators/openstack-operator-index-b7cfc" Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.499728 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-8l949" Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.559455 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmc6c\" (UniqueName: \"kubernetes.io/projected/b25ef981-69d0-4a4e-bbb8-62f7a32cefdb-kube-api-access-gmc6c\") pod \"openstack-operator-index-b7cfc\" (UID: \"b25ef981-69d0-4a4e-bbb8-62f7a32cefdb\") " pod="openstack-operators/openstack-operator-index-b7cfc" Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.581810 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmc6c\" (UniqueName: \"kubernetes.io/projected/b25ef981-69d0-4a4e-bbb8-62f7a32cefdb-kube-api-access-gmc6c\") pod \"openstack-operator-index-b7cfc\" (UID: \"b25ef981-69d0-4a4e-bbb8-62f7a32cefdb\") " pod="openstack-operators/openstack-operator-index-b7cfc" Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.598667 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-b7cfc" Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.662036 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6rkk7\" (UniqueName: \"kubernetes.io/projected/e1ec0727-710d-45cf-85ef-5a634d710c4b-kube-api-access-6rkk7\") pod \"e1ec0727-710d-45cf-85ef-5a634d710c4b\" (UID: \"e1ec0727-710d-45cf-85ef-5a634d710c4b\") " Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.667804 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1ec0727-710d-45cf-85ef-5a634d710c4b-kube-api-access-6rkk7" (OuterVolumeSpecName: "kube-api-access-6rkk7") pod "e1ec0727-710d-45cf-85ef-5a634d710c4b" (UID: "e1ec0727-710d-45cf-85ef-5a634d710c4b"). InnerVolumeSpecName "kube-api-access-6rkk7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.763135 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6rkk7\" (UniqueName: \"kubernetes.io/projected/e1ec0727-710d-45cf-85ef-5a634d710c4b-kube-api-access-6rkk7\") on node \"crc\" DevicePath \"\"" Dec 10 15:33:50 crc kubenswrapper[4669]: I1210 15:33:50.982131 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-b7cfc"] Dec 10 15:33:50 crc kubenswrapper[4669]: W1210 15:33:50.996866 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb25ef981_69d0_4a4e_bbb8_62f7a32cefdb.slice/crio-df8e18095e83c08e02e0ee9c040bc91d46ee4e92ec243d65506bbdd2a45c5195 WatchSource:0}: Error finding container df8e18095e83c08e02e0ee9c040bc91d46ee4e92ec243d65506bbdd2a45c5195: Status 404 returned error can't find the container with id df8e18095e83c08e02e0ee9c040bc91d46ee4e92ec243d65506bbdd2a45c5195 Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.121373 4669 generic.go:334] "Generic (PLEG): container finished" podID="e1ec0727-710d-45cf-85ef-5a634d710c4b" containerID="e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637" exitCode=0 Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.121483 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-8l949" Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.122051 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8l949" event={"ID":"e1ec0727-710d-45cf-85ef-5a634d710c4b","Type":"ContainerDied","Data":"e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637"} Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.122091 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-8l949" event={"ID":"e1ec0727-710d-45cf-85ef-5a634d710c4b","Type":"ContainerDied","Data":"60e02312eb8466767f7245a6ed55de8a0118126663518b7cbd81ecd4f179ff4a"} Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.122108 4669 scope.go:117] "RemoveContainer" containerID="e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637" Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.125272 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-b7cfc" event={"ID":"b25ef981-69d0-4a4e-bbb8-62f7a32cefdb","Type":"ContainerStarted","Data":"df8e18095e83c08e02e0ee9c040bc91d46ee4e92ec243d65506bbdd2a45c5195"} Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.148311 4669 scope.go:117] "RemoveContainer" containerID="e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637" Dec 10 15:33:51 crc kubenswrapper[4669]: E1210 15:33:51.148786 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637\": container with ID starting with e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637 not found: ID does not exist" containerID="e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637" Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.148844 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637"} err="failed to get container status \"e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637\": rpc error: code = NotFound desc = could not find container \"e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637\": container with ID starting with e1075aaa945b926967d896cd2ee740db0d6539e7f7d27d518be9fe9e63d95637 not found: ID does not exist" Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.170030 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-8l949"] Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.175611 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-8l949"] Dec 10 15:33:51 crc kubenswrapper[4669]: I1210 15:33:51.775023 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-nht72" Dec 10 15:33:52 crc kubenswrapper[4669]: I1210 15:33:52.136091 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-b7cfc" event={"ID":"b25ef981-69d0-4a4e-bbb8-62f7a32cefdb","Type":"ContainerStarted","Data":"63cb9f6f5d274bde0927310074e9426d60bac17488c5a4e04e21ac04f44d79c4"} Dec 10 15:33:52 crc kubenswrapper[4669]: I1210 15:33:52.416051 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1ec0727-710d-45cf-85ef-5a634d710c4b" 
path="/var/lib/kubelet/pods/e1ec0727-710d-45cf-85ef-5a634d710c4b/volumes" Dec 10 15:33:58 crc kubenswrapper[4669]: I1210 15:33:58.745189 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:33:58 crc kubenswrapper[4669]: I1210 15:33:58.745784 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:33:58 crc kubenswrapper[4669]: I1210 15:33:58.745829 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:33:58 crc kubenswrapper[4669]: I1210 15:33:58.746493 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c16924e004007c4e5fe251725834049c68819cdeff3df1d8eef2127a3516ef0e"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:33:58 crc kubenswrapper[4669]: I1210 15:33:58.746585 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://c16924e004007c4e5fe251725834049c68819cdeff3df1d8eef2127a3516ef0e" gracePeriod=600 Dec 10 15:33:59 crc kubenswrapper[4669]: I1210 15:33:59.187192 4669 generic.go:334] "Generic (PLEG): container finished" podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="c16924e004007c4e5fe251725834049c68819cdeff3df1d8eef2127a3516ef0e" exitCode=0 Dec 10 15:33:59 crc kubenswrapper[4669]: I1210 15:33:59.187260 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"c16924e004007c4e5fe251725834049c68819cdeff3df1d8eef2127a3516ef0e"} Dec 10 15:33:59 crc kubenswrapper[4669]: I1210 15:33:59.187584 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"482e52d3d8c64b3e83e8a6d04d8d5d20434b81c087e0b47c0a8e6b34cdbf278e"} Dec 10 15:33:59 crc kubenswrapper[4669]: I1210 15:33:59.187613 4669 scope.go:117] "RemoveContainer" containerID="9e8fc93e3f55db44f154c5930fd8404d5d53a93663bdc6cd89f8ba24e2f46edf" Dec 10 15:33:59 crc kubenswrapper[4669]: I1210 15:33:59.206578 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-b7cfc" podStartSLOduration=9.153797323 podStartE2EDuration="9.206552741s" podCreationTimestamp="2025-12-10 15:33:50 +0000 UTC" firstStartedPulling="2025-12-10 15:33:51.00395399 +0000 UTC m=+804.920900627" lastFinishedPulling="2025-12-10 15:33:51.056709418 +0000 UTC m=+804.973656045" observedRunningTime="2025-12-10 15:33:52.160241176 +0000 UTC m=+806.077187813" watchObservedRunningTime="2025-12-10 
15:33:59.206552741 +0000 UTC m=+813.123499378" Dec 10 15:34:00 crc kubenswrapper[4669]: I1210 15:34:00.598878 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-b7cfc" Dec 10 15:34:00 crc kubenswrapper[4669]: I1210 15:34:00.599326 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-b7cfc" Dec 10 15:34:00 crc kubenswrapper[4669]: I1210 15:34:00.638439 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-b7cfc" Dec 10 15:34:01 crc kubenswrapper[4669]: I1210 15:34:01.234439 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-b7cfc" Dec 10 15:34:01 crc kubenswrapper[4669]: I1210 15:34:01.785624 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-8nqwr" Dec 10 15:34:07 crc kubenswrapper[4669]: I1210 15:34:07.890394 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d"] Dec 10 15:34:07 crc kubenswrapper[4669]: E1210 15:34:07.890961 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ec0727-710d-45cf-85ef-5a634d710c4b" containerName="registry-server" Dec 10 15:34:07 crc kubenswrapper[4669]: I1210 15:34:07.890977 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ec0727-710d-45cf-85ef-5a634d710c4b" containerName="registry-server" Dec 10 15:34:07 crc kubenswrapper[4669]: I1210 15:34:07.891113 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ec0727-710d-45cf-85ef-5a634d710c4b" containerName="registry-server" Dec 10 15:34:07 crc kubenswrapper[4669]: I1210 15:34:07.892148 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:07 crc kubenswrapper[4669]: I1210 15:34:07.895669 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-l22bd" Dec 10 15:34:07 crc kubenswrapper[4669]: I1210 15:34:07.907588 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d"] Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.018664 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5z6ch\" (UniqueName: \"kubernetes.io/projected/2358ffe2-b531-4017-b8af-fc3915d57ee2-kube-api-access-5z6ch\") pod \"5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.018744 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-util\") pod \"5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.018790 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-bundle\") pod \"5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.120797 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5z6ch\" (UniqueName: \"kubernetes.io/projected/2358ffe2-b531-4017-b8af-fc3915d57ee2-kube-api-access-5z6ch\") pod \"5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.120904 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-util\") pod \"5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.120977 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-bundle\") pod \"5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.121888 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-util\") pod \"5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.122039 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-bundle\") pod \"5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.155152 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5z6ch\" (UniqueName: \"kubernetes.io/projected/2358ffe2-b531-4017-b8af-fc3915d57ee2-kube-api-access-5z6ch\") pod \"5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.217169 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:08 crc kubenswrapper[4669]: I1210 15:34:08.642273 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d"] Dec 10 15:34:09 crc kubenswrapper[4669]: I1210 15:34:09.270881 4669 generic.go:334] "Generic (PLEG): container finished" podID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerID="37dbad345d83994e1629f91d6a8e4b5f4df4a4759d127014cd738e9f4dbe5687" exitCode=0 Dec 10 15:34:09 crc kubenswrapper[4669]: I1210 15:34:09.270956 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" event={"ID":"2358ffe2-b531-4017-b8af-fc3915d57ee2","Type":"ContainerDied","Data":"37dbad345d83994e1629f91d6a8e4b5f4df4a4759d127014cd738e9f4dbe5687"} Dec 10 15:34:09 crc kubenswrapper[4669]: I1210 15:34:09.271037 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" event={"ID":"2358ffe2-b531-4017-b8af-fc3915d57ee2","Type":"ContainerStarted","Data":"702050768dc367f2e556f825055edb54856b535d76653b97f04b64c41fbda084"} Dec 10 15:34:10 crc kubenswrapper[4669]: I1210 15:34:10.281943 4669 generic.go:334] "Generic (PLEG): container finished" podID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerID="3646567518d7f98d20f6dbacf5abea1c5510fbf558436258341789a973e970b2" exitCode=0 Dec 10 15:34:10 crc kubenswrapper[4669]: I1210 15:34:10.282052 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" event={"ID":"2358ffe2-b531-4017-b8af-fc3915d57ee2","Type":"ContainerDied","Data":"3646567518d7f98d20f6dbacf5abea1c5510fbf558436258341789a973e970b2"} Dec 10 15:34:11 crc kubenswrapper[4669]: I1210 15:34:11.295730 4669 generic.go:334] "Generic (PLEG): container finished" podID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerID="8fc48a5349f48a5cd6737ed593ec695c10e642ec074e13ce3e62a9fef280a70b" exitCode=0 Dec 10 15:34:11 crc kubenswrapper[4669]: I1210 15:34:11.295876 4669 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" event={"ID":"2358ffe2-b531-4017-b8af-fc3915d57ee2","Type":"ContainerDied","Data":"8fc48a5349f48a5cd6737ed593ec695c10e642ec074e13ce3e62a9fef280a70b"} Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.600974 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.789066 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5z6ch\" (UniqueName: \"kubernetes.io/projected/2358ffe2-b531-4017-b8af-fc3915d57ee2-kube-api-access-5z6ch\") pod \"2358ffe2-b531-4017-b8af-fc3915d57ee2\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.789158 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-bundle\") pod \"2358ffe2-b531-4017-b8af-fc3915d57ee2\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.789179 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-util\") pod \"2358ffe2-b531-4017-b8af-fc3915d57ee2\" (UID: \"2358ffe2-b531-4017-b8af-fc3915d57ee2\") " Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.797909 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-bundle" (OuterVolumeSpecName: "bundle") pod "2358ffe2-b531-4017-b8af-fc3915d57ee2" (UID: "2358ffe2-b531-4017-b8af-fc3915d57ee2"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.802322 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-util" (OuterVolumeSpecName: "util") pod "2358ffe2-b531-4017-b8af-fc3915d57ee2" (UID: "2358ffe2-b531-4017-b8af-fc3915d57ee2"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.810726 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2358ffe2-b531-4017-b8af-fc3915d57ee2-kube-api-access-5z6ch" (OuterVolumeSpecName: "kube-api-access-5z6ch") pod "2358ffe2-b531-4017-b8af-fc3915d57ee2" (UID: "2358ffe2-b531-4017-b8af-fc3915d57ee2"). InnerVolumeSpecName "kube-api-access-5z6ch". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.890616 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5z6ch\" (UniqueName: \"kubernetes.io/projected/2358ffe2-b531-4017-b8af-fc3915d57ee2-kube-api-access-5z6ch\") on node \"crc\" DevicePath \"\"" Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.890862 4669 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:34:12 crc kubenswrapper[4669]: I1210 15:34:12.890944 4669 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2358ffe2-b531-4017-b8af-fc3915d57ee2-util\") on node \"crc\" DevicePath \"\"" Dec 10 15:34:13 crc kubenswrapper[4669]: I1210 15:34:13.314913 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" event={"ID":"2358ffe2-b531-4017-b8af-fc3915d57ee2","Type":"ContainerDied","Data":"702050768dc367f2e556f825055edb54856b535d76653b97f04b64c41fbda084"} Dec 10 15:34:13 crc kubenswrapper[4669]: I1210 15:34:13.314958 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="702050768dc367f2e556f825055edb54856b535d76653b97f04b64c41fbda084" Dec 10 15:34:13 crc kubenswrapper[4669]: I1210 15:34:13.314995 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.144975 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l"] Dec 10 15:34:20 crc kubenswrapper[4669]: E1210 15:34:20.145516 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerName="extract" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.145530 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerName="extract" Dec 10 15:34:20 crc kubenswrapper[4669]: E1210 15:34:20.145538 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerName="pull" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.145544 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerName="pull" Dec 10 15:34:20 crc kubenswrapper[4669]: E1210 15:34:20.145556 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerName="util" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.145562 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerName="util" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.145668 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="2358ffe2-b531-4017-b8af-fc3915d57ee2" containerName="extract" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.146058 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.148984 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-qjvhc" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.175706 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l"] Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.285607 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5gc7\" (UniqueName: \"kubernetes.io/projected/2fa923a3-6ffe-40a5-8130-ff8220c64847-kube-api-access-l5gc7\") pod \"openstack-operator-controller-operator-84dffcd785-tcn6l\" (UID: \"2fa923a3-6ffe-40a5-8130-ff8220c64847\") " pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.387321 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5gc7\" (UniqueName: \"kubernetes.io/projected/2fa923a3-6ffe-40a5-8130-ff8220c64847-kube-api-access-l5gc7\") pod \"openstack-operator-controller-operator-84dffcd785-tcn6l\" (UID: \"2fa923a3-6ffe-40a5-8130-ff8220c64847\") " pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.414629 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5gc7\" (UniqueName: \"kubernetes.io/projected/2fa923a3-6ffe-40a5-8130-ff8220c64847-kube-api-access-l5gc7\") pod \"openstack-operator-controller-operator-84dffcd785-tcn6l\" (UID: \"2fa923a3-6ffe-40a5-8130-ff8220c64847\") " pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.465688 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" Dec 10 15:34:20 crc kubenswrapper[4669]: W1210 15:34:20.724577 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fa923a3_6ffe_40a5_8130_ff8220c64847.slice/crio-684ea0a4cedec7767a59cc750f25faf4d6f577e9854291c3e5eda9c5ab60da2a WatchSource:0}: Error finding container 684ea0a4cedec7767a59cc750f25faf4d6f577e9854291c3e5eda9c5ab60da2a: Status 404 returned error can't find the container with id 684ea0a4cedec7767a59cc750f25faf4d6f577e9854291c3e5eda9c5ab60da2a Dec 10 15:34:20 crc kubenswrapper[4669]: I1210 15:34:20.726883 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l"] Dec 10 15:34:21 crc kubenswrapper[4669]: I1210 15:34:21.375644 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" event={"ID":"2fa923a3-6ffe-40a5-8130-ff8220c64847","Type":"ContainerStarted","Data":"684ea0a4cedec7767a59cc750f25faf4d6f577e9854291c3e5eda9c5ab60da2a"} Dec 10 15:34:26 crc kubenswrapper[4669]: I1210 15:34:26.417182 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" event={"ID":"2fa923a3-6ffe-40a5-8130-ff8220c64847","Type":"ContainerStarted","Data":"4b53b9bf01f872b2c30b05206a7146f24928a9e105094dccb5f60aa580bd9529"} Dec 10 15:34:26 crc kubenswrapper[4669]: I1210 15:34:26.418432 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" Dec 10 15:34:30 crc kubenswrapper[4669]: I1210 15:34:30.469064 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" Dec 10 15:34:30 crc kubenswrapper[4669]: I1210 15:34:30.497494 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-84dffcd785-tcn6l" podStartSLOduration=5.858116935 podStartE2EDuration="10.49747681s" podCreationTimestamp="2025-12-10 15:34:20 +0000 UTC" firstStartedPulling="2025-12-10 15:34:20.735455529 +0000 UTC m=+834.652402156" lastFinishedPulling="2025-12-10 15:34:25.374815404 +0000 UTC m=+839.291762031" observedRunningTime="2025-12-10 15:34:26.465400394 +0000 UTC m=+840.382347061" watchObservedRunningTime="2025-12-10 15:34:30.49747681 +0000 UTC m=+844.414423437" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.011313 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.013128 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.015628 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-j7vqj" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.027862 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.035576 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.036562 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.041792 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-8l2g7" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.054931 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.074077 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.083392 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.089918 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-nr25q" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.102900 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mftgr\" (UniqueName: \"kubernetes.io/projected/4ecff5d1-1a76-4282-a11e-ee74b69e7450-kube-api-access-mftgr\") pod \"cinder-operator-controller-manager-6c677c69b-d6c6m\" (UID: \"4ecff5d1-1a76-4282-a11e-ee74b69e7450\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.102967 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmm9l\" (UniqueName: \"kubernetes.io/projected/a260011c-3fcf-47cd-9472-20b180b4bd2f-kube-api-access-hmm9l\") pod \"barbican-operator-controller-manager-7d9dfd778-lhblg\" (UID: \"a260011c-3fcf-47cd-9472-20b180b4bd2f\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.102988 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rckrr\" (UniqueName: \"kubernetes.io/projected/f0da37f3-9f8c-4d66-ba13-6c1da41ceba2-kube-api-access-rckrr\") pod \"designate-operator-controller-manager-697fb699cf-47j5t\" (UID: \"f0da37f3-9f8c-4d66-ba13-6c1da41ceba2\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.114279 4669 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.115476 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.120406 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-9kmnb" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.124288 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.125707 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.138291 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-zzq2f" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.146601 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.149835 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.151616 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.156958 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-gcssm" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.162363 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.171380 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.204012 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mftgr\" (UniqueName: \"kubernetes.io/projected/4ecff5d1-1a76-4282-a11e-ee74b69e7450-kube-api-access-mftgr\") pod \"cinder-operator-controller-manager-6c677c69b-d6c6m\" (UID: \"4ecff5d1-1a76-4282-a11e-ee74b69e7450\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.204088 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klzng\" (UniqueName: \"kubernetes.io/projected/edd21671-d820-4fb7-835e-97fd0ade3909-kube-api-access-klzng\") pod \"horizon-operator-controller-manager-68c6d99b8f-fkxpl\" (UID: \"edd21671-d820-4fb7-835e-97fd0ade3909\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.204128 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64bz8\" (UniqueName: \"kubernetes.io/projected/ec748f3b-e193-43da-8d3b-c6d6169f58b5-kube-api-access-64bz8\") pod 
\"heat-operator-controller-manager-5f64f6f8bb-gkcl6\" (UID: \"ec748f3b-e193-43da-8d3b-c6d6169f58b5\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.204163 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmm9l\" (UniqueName: \"kubernetes.io/projected/a260011c-3fcf-47cd-9472-20b180b4bd2f-kube-api-access-hmm9l\") pod \"barbican-operator-controller-manager-7d9dfd778-lhblg\" (UID: \"a260011c-3fcf-47cd-9472-20b180b4bd2f\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.204183 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rckrr\" (UniqueName: \"kubernetes.io/projected/f0da37f3-9f8c-4d66-ba13-6c1da41ceba2-kube-api-access-rckrr\") pod \"designate-operator-controller-manager-697fb699cf-47j5t\" (UID: \"f0da37f3-9f8c-4d66-ba13-6c1da41ceba2\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.204243 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ks7g5\" (UniqueName: \"kubernetes.io/projected/9b8ca892-98a2-4e46-816f-548631ceaf50-kube-api-access-ks7g5\") pod \"glance-operator-controller-manager-5697bb5779-vzhnx\" (UID: \"9b8ca892-98a2-4e46-816f-548631ceaf50\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.207678 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.208887 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.210710 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.222682 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-4fk6w" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.271154 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rckrr\" (UniqueName: \"kubernetes.io/projected/f0da37f3-9f8c-4d66-ba13-6c1da41ceba2-kube-api-access-rckrr\") pod \"designate-operator-controller-manager-697fb699cf-47j5t\" (UID: \"f0da37f3-9f8c-4d66-ba13-6c1da41ceba2\") " pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.279157 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.300159 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmm9l\" (UniqueName: \"kubernetes.io/projected/a260011c-3fcf-47cd-9472-20b180b4bd2f-kube-api-access-hmm9l\") pod \"barbican-operator-controller-manager-7d9dfd778-lhblg\" (UID: \"a260011c-3fcf-47cd-9472-20b180b4bd2f\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.308850 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ks7g5\" (UniqueName: \"kubernetes.io/projected/9b8ca892-98a2-4e46-816f-548631ceaf50-kube-api-access-ks7g5\") pod \"glance-operator-controller-manager-5697bb5779-vzhnx\" (UID: \"9b8ca892-98a2-4e46-816f-548631ceaf50\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.309024 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.309124 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klzng\" (UniqueName: \"kubernetes.io/projected/edd21671-d820-4fb7-835e-97fd0ade3909-kube-api-access-klzng\") pod \"horizon-operator-controller-manager-68c6d99b8f-fkxpl\" (UID: \"edd21671-d820-4fb7-835e-97fd0ade3909\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.309351 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp29s\" (UniqueName: \"kubernetes.io/projected/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-kube-api-access-wp29s\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.309508 4669 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-64bz8\" (UniqueName: \"kubernetes.io/projected/ec748f3b-e193-43da-8d3b-c6d6169f58b5-kube-api-access-64bz8\") pod \"heat-operator-controller-manager-5f64f6f8bb-gkcl6\" (UID: \"ec748f3b-e193-43da-8d3b-c6d6169f58b5\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.310591 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mftgr\" (UniqueName: \"kubernetes.io/projected/4ecff5d1-1a76-4282-a11e-ee74b69e7450-kube-api-access-mftgr\") pod \"cinder-operator-controller-manager-6c677c69b-d6c6m\" (UID: \"4ecff5d1-1a76-4282-a11e-ee74b69e7450\") " pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.341302 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.347972 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-sh22l"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.373676 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.374490 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.379716 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-2dfr6" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.383336 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klzng\" (UniqueName: \"kubernetes.io/projected/edd21671-d820-4fb7-835e-97fd0ade3909-kube-api-access-klzng\") pod \"horizon-operator-controller-manager-68c6d99b8f-fkxpl\" (UID: \"edd21671-d820-4fb7-835e-97fd0ade3909\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.391141 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.405041 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.414529 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.414577 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbkx2\" (UniqueName: \"kubernetes.io/projected/332b9f2c-9474-4368-9d76-3e98561c2279-kube-api-access-hbkx2\") pod \"ironic-operator-controller-manager-967d97867-sh22l\" (UID: \"332b9f2c-9474-4368-9d76-3e98561c2279\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.414602 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64bz8\" (UniqueName: \"kubernetes.io/projected/ec748f3b-e193-43da-8d3b-c6d6169f58b5-kube-api-access-64bz8\") pod \"heat-operator-controller-manager-5f64f6f8bb-gkcl6\" (UID: \"ec748f3b-e193-43da-8d3b-c6d6169f58b5\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.414610 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp29s\" (UniqueName: \"kubernetes.io/projected/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-kube-api-access-wp29s\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:08 crc kubenswrapper[4669]: E1210 15:35:08.415122 4669 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 15:35:08 crc kubenswrapper[4669]: E1210 15:35:08.417204 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert podName:cfb93e7d-25ad-468f-8b68-9b6b57676a5a nodeName:}" failed. No retries permitted until 2025-12-10 15:35:08.917185929 +0000 UTC m=+882.834132556 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert") pod "infra-operator-controller-manager-78d48bff9d-qq45r" (UID: "cfb93e7d-25ad-468f-8b68-9b6b57676a5a") : secret "infra-operator-webhook-server-cert" not found Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.422782 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-sh22l"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.422813 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.423762 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.425429 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ks7g5\" (UniqueName: \"kubernetes.io/projected/9b8ca892-98a2-4e46-816f-548631ceaf50-kube-api-access-ks7g5\") pod \"glance-operator-controller-manager-5697bb5779-vzhnx\" (UID: \"9b8ca892-98a2-4e46-816f-548631ceaf50\") " pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.429923 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.431097 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.431946 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.432474 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.433008 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-wn2hp" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.437283 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.449282 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.449684 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.449953 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-w5cl6" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.457273 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-n7qph" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.459535 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.460595 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp29s\" (UniqueName: \"kubernetes.io/projected/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-kube-api-access-wp29s\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.487059 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.487229 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.519285 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.520466 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.523113 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6kv4f\" (UniqueName: \"kubernetes.io/projected/3daa730d-a51c-4330-8d36-712f27114f09-kube-api-access-6kv4f\") pod \"mariadb-operator-controller-manager-79c8c4686c-866l8\" (UID: \"3daa730d-a51c-4330-8d36-712f27114f09\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.523363 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbkx2\" (UniqueName: \"kubernetes.io/projected/332b9f2c-9474-4368-9d76-3e98561c2279-kube-api-access-hbkx2\") pod \"ironic-operator-controller-manager-967d97867-sh22l\" (UID: \"332b9f2c-9474-4368-9d76-3e98561c2279\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.523527 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgv5g\" (UniqueName: \"kubernetes.io/projected/460630fb-9db1-487a-af29-d92b820e0a1b-kube-api-access-tgv5g\") pod \"keystone-operator-controller-manager-7765d96ddf-t7wdn\" (UID: \"460630fb-9db1-487a-af29-d92b820e0a1b\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.523660 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7sn9\" (UniqueName: \"kubernetes.io/projected/c24a6a06-852c-476f-bb86-03c1e2430a48-kube-api-access-z7sn9\") pod \"manila-operator-controller-manager-5b5fd79c9c-c9chn\" (UID: \"c24a6a06-852c-476f-bb86-03c1e2430a48\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.536561 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-px4d8" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.558873 4669 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8"] Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.560272 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" Dec 10 15:35:08 crc kubenswrapper[4669]: I1210 15:35:08.578618 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-ndhgj" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.596755 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.597773 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.602983 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-ktvw5" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.620686 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbkx2\" (UniqueName: \"kubernetes.io/projected/332b9f2c-9474-4368-9d76-3e98561c2279-kube-api-access-hbkx2\") pod \"ironic-operator-controller-manager-967d97867-sh22l\" (UID: \"332b9f2c-9474-4368-9d76-3e98561c2279\") " pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.624973 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbg27\" (UniqueName: \"kubernetes.io/projected/d27eeff8-d1a3-4d08-a474-076b14194921-kube-api-access-tbg27\") pod \"nova-operator-controller-manager-697bc559fc-8cxm8\" (UID: \"d27eeff8-d1a3-4d08-a474-076b14194921\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.625023 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxqdm\" (UniqueName: \"kubernetes.io/projected/5f83dee3-b4c5-4c8a-ba44-78d74195e59c-kube-api-access-jxqdm\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-6ln9s\" (UID: \"5f83dee3-b4c5-4c8a-ba44-78d74195e59c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.625084 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6kv4f\" (UniqueName: \"kubernetes.io/projected/3daa730d-a51c-4330-8d36-712f27114f09-kube-api-access-6kv4f\") pod \"mariadb-operator-controller-manager-79c8c4686c-866l8\" (UID: \"3daa730d-a51c-4330-8d36-712f27114f09\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.625139 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgv5g\" (UniqueName: \"kubernetes.io/projected/460630fb-9db1-487a-af29-d92b820e0a1b-kube-api-access-tgv5g\") pod \"keystone-operator-controller-manager-7765d96ddf-t7wdn\" (UID: \"460630fb-9db1-487a-af29-d92b820e0a1b\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.625166 4669 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7sn9\" (UniqueName: \"kubernetes.io/projected/c24a6a06-852c-476f-bb86-03c1e2430a48-kube-api-access-z7sn9\") pod \"manila-operator-controller-manager-5b5fd79c9c-c9chn\" (UID: \"c24a6a06-852c-476f-bb86-03c1e2430a48\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.625206 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv79m\" (UniqueName: \"kubernetes.io/projected/e68c43d5-161b-4ab1-9592-8a2d7f32f7eb-kube-api-access-zv79m\") pod \"octavia-operator-controller-manager-998648c74-cgdhp\" (UID: \"e68c43d5-161b-4ab1-9592-8a2d7f32f7eb\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.625766 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.640620 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.654285 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.654384 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.672424 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-2s44z" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.672624 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.680325 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6kv4f\" (UniqueName: \"kubernetes.io/projected/3daa730d-a51c-4330-8d36-712f27114f09-kube-api-access-6kv4f\") pod \"mariadb-operator-controller-manager-79c8c4686c-866l8\" (UID: \"3daa730d-a51c-4330-8d36-712f27114f09\") " pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.695277 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgv5g\" (UniqueName: \"kubernetes.io/projected/460630fb-9db1-487a-af29-d92b820e0a1b-kube-api-access-tgv5g\") pod \"keystone-operator-controller-manager-7765d96ddf-t7wdn\" (UID: \"460630fb-9db1-487a-af29-d92b820e0a1b\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.704207 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7sn9\" (UniqueName: \"kubernetes.io/projected/c24a6a06-852c-476f-bb86-03c1e2430a48-kube-api-access-z7sn9\") pod \"manila-operator-controller-manager-5b5fd79c9c-c9chn\" (UID: \"c24a6a06-852c-476f-bb86-03c1e2430a48\") " pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.778182 4669 
kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.778562 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbg27\" (UniqueName: \"kubernetes.io/projected/d27eeff8-d1a3-4d08-a474-076b14194921-kube-api-access-tbg27\") pod \"nova-operator-controller-manager-697bc559fc-8cxm8\" (UID: \"d27eeff8-d1a3-4d08-a474-076b14194921\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.778581 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxqdm\" (UniqueName: \"kubernetes.io/projected/5f83dee3-b4c5-4c8a-ba44-78d74195e59c-kube-api-access-jxqdm\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-6ln9s\" (UID: \"5f83dee3-b4c5-4c8a-ba44-78d74195e59c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.778642 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6tn7\" (UniqueName: \"kubernetes.io/projected/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-kube-api-access-f6tn7\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.778667 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.778692 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv79m\" (UniqueName: \"kubernetes.io/projected/e68c43d5-161b-4ab1-9592-8a2d7f32f7eb-kube-api-access-zv79m\") pod \"octavia-operator-controller-manager-998648c74-cgdhp\" (UID: \"e68c43d5-161b-4ab1-9592-8a2d7f32f7eb\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.786069 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.787753 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.787826 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.804675 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-dlztv" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.806044 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-2x88f"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.807039 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.817962 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.818046 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-2x88f"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.827486 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.828646 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.839537 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.858730 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-cfjc9" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.866436 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-6pgtm" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.876266 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4"] Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.877541 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.879712 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6tn7\" (UniqueName: \"kubernetes.io/projected/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-kube-api-access-f6tn7\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.879761 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8zsx\" (UniqueName: \"kubernetes.io/projected/812c4aed-2b51-4ae9-b36d-c3ac85d47d73-kube-api-access-s8zsx\") pod \"placement-operator-controller-manager-78f8948974-2x88f\" (UID: \"812c4aed-2b51-4ae9-b36d-c3ac85d47d73\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.879799 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ksp6h\" (UniqueName: \"kubernetes.io/projected/2db5775a-2728-4581-98c9-155056e55c21-kube-api-access-ksp6h\") pod \"telemetry-operator-controller-manager-58d5ff84df-57gx4\" (UID: \"2db5775a-2728-4581-98c9-155056e55c21\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.879823 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.879870 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mn6wp\" (UniqueName: \"kubernetes.io/projected/35c1b18e-dac3-46c5-8714-44b5f7cc3462-kube-api-access-mn6wp\") pod \"ovn-operator-controller-manager-b6456fdb6-5zbv5\" (UID: \"35c1b18e-dac3-46c5-8714-44b5f7cc3462\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5" Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.879896 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8fk5\" (UniqueName: \"kubernetes.io/projected/b169689d-8a97-407f-81f7-56497bc77f0b-kube-api-access-q8fk5\") pod \"swift-operator-controller-manager-9d58d64bc-n8qx2\" (UID: \"b169689d-8a97-407f-81f7-56497bc77f0b\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:08.880376 4669 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:08.880421 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert podName:b8b8c80e-24e4-40bc-9927-21ce8b6c2667 nodeName:}" failed. 
No retries permitted until 2025-12-10 15:35:09.380405143 +0000 UTC m=+883.297351780 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f99k4v" (UID: "b8b8c80e-24e4-40bc-9927-21ce8b6c2667") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.980160    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv79m\" (UniqueName: \"kubernetes.io/projected/e68c43d5-161b-4ab1-9592-8a2d7f32f7eb-kube-api-access-zv79m\") pod \"octavia-operator-controller-manager-998648c74-cgdhp\" (UID: \"e68c43d5-161b-4ab1-9592-8a2d7f32f7eb\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:08.990134    4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-25wk6"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.001593    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ksp6h\" (UniqueName: \"kubernetes.io/projected/2db5775a-2728-4581-98c9-155056e55c21-kube-api-access-ksp6h\") pod \"telemetry-operator-controller-manager-58d5ff84df-57gx4\" (UID: \"2db5775a-2728-4581-98c9-155056e55c21\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.001661    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mn6wp\" (UniqueName: \"kubernetes.io/projected/35c1b18e-dac3-46c5-8714-44b5f7cc3462-kube-api-access-mn6wp\") pod \"ovn-operator-controller-manager-b6456fdb6-5zbv5\" (UID: \"35c1b18e-dac3-46c5-8714-44b5f7cc3462\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.001691    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8fk5\" (UniqueName: \"kubernetes.io/projected/b169689d-8a97-407f-81f7-56497bc77f0b-kube-api-access-q8fk5\") pod \"swift-operator-controller-manager-9d58d64bc-n8qx2\" (UID: \"b169689d-8a97-407f-81f7-56497bc77f0b\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.001774    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.001847    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8zsx\" (UniqueName: \"kubernetes.io/projected/812c4aed-2b51-4ae9-b36d-c3ac85d47d73-kube-api-access-s8zsx\") pod \"placement-operator-controller-manager-78f8948974-2x88f\" (UID: \"812c4aed-2b51-4ae9-b36d-c3ac85d47d73\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.030178    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxqdm\" (UniqueName: \"kubernetes.io/projected/5f83dee3-b4c5-4c8a-ba44-78d74195e59c-kube-api-access-jxqdm\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-6ln9s\" (UID: \"5f83dee3-b4c5-4c8a-ba44-78d74195e59c\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.050451    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.051887    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbg27\" (UniqueName: \"kubernetes.io/projected/d27eeff8-d1a3-4d08-a474-076b14194921-kube-api-access-tbg27\") pod \"nova-operator-controller-manager-697bc559fc-8cxm8\" (UID: \"d27eeff8-d1a3-4d08-a474-076b14194921\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8"
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.054054    4669 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.068830    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert podName:cfb93e7d-25ad-468f-8b68-9b6b57676a5a nodeName:}" failed. No retries permitted until 2025-12-10 15:35:10.068797139 +0000 UTC m=+883.985743766 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert") pod "infra-operator-controller-manager-78d48bff9d-qq45r" (UID: "cfb93e7d-25ad-468f-8b68-9b6b57676a5a") : secret "infra-operator-webhook-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.072665    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6tn7\" (UniqueName: \"kubernetes.io/projected/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-kube-api-access-f6tn7\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.088252    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8fk5\" (UniqueName: \"kubernetes.io/projected/b169689d-8a97-407f-81f7-56497bc77f0b-kube-api-access-q8fk5\") pod \"swift-operator-controller-manager-9d58d64bc-n8qx2\" (UID: \"b169689d-8a97-407f-81f7-56497bc77f0b\") " pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.143383    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mn6wp\" (UniqueName: \"kubernetes.io/projected/35c1b18e-dac3-46c5-8714-44b5f7cc3462-kube-api-access-mn6wp\") pod \"ovn-operator-controller-manager-b6456fdb6-5zbv5\" (UID: \"35c1b18e-dac3-46c5-8714-44b5f7cc3462\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.150928    4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.152026    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.156058    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ksp6h\" (UniqueName: \"kubernetes.io/projected/2db5775a-2728-4581-98c9-155056e55c21-kube-api-access-ksp6h\") pod \"telemetry-operator-controller-manager-58d5ff84df-57gx4\" (UID: \"2db5775a-2728-4581-98c9-155056e55c21\") " pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.156454    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8zsx\" (UniqueName: \"kubernetes.io/projected/812c4aed-2b51-4ae9-b36d-c3ac85d47d73-kube-api-access-s8zsx\") pod \"placement-operator-controller-manager-78f8948974-2x88f\" (UID: \"812c4aed-2b51-4ae9-b36d-c3ac85d47d73\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.158619    4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-6j9wn"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.166512    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.183123    4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.184792    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.198263    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.198810    4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-fk27k"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.231699    4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf2r5\" (UniqueName: \"kubernetes.io/projected/0afaf438-c06e-45c6-a814-d032d7a43700-kube-api-access-lf2r5\") pod \"test-operator-controller-manager-5854674fcc-dsxdb\" (UID: \"0afaf438-c06e-45c6-a814-d032d7a43700\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.245071    4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.246282    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.279988    4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-hgkgr"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.280127    4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.280381    4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.301706    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.332836    4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.332874    4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8gw5\" (UniqueName: \"kubernetes.io/projected/3e2afd98-5854-4fd7-abe6-059174bf661e-kube-api-access-x8gw5\") pod \"watcher-operator-controller-manager-75944c9b7-w4vkk\" (UID: \"3e2afd98-5854-4fd7-abe6-059174bf661e\") " pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.332896    4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.332912    4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvhwt\" (UniqueName: \"kubernetes.io/projected/1530ae38-d334-436e-9599-54f0caeaf3c4-kube-api-access-rvhwt\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.332980    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf2r5\" (UniqueName: \"kubernetes.io/projected/0afaf438-c06e-45c6-a814-d032d7a43700-kube-api-access-lf2r5\") pod \"test-operator-controller-manager-5854674fcc-dsxdb\" (UID: \"0afaf438-c06e-45c6-a814-d032d7a43700\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.375479    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.385032    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf2r5\" (UniqueName: \"kubernetes.io/projected/0afaf438-c06e-45c6-a814-d032d7a43700-kube-api-access-lf2r5\") pod \"test-operator-controller-manager-5854674fcc-dsxdb\" (UID: \"0afaf438-c06e-45c6-a814-d032d7a43700\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.458167    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.458308    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8gw5\" (UniqueName: \"kubernetes.io/projected/3e2afd98-5854-4fd7-abe6-059174bf661e-kube-api-access-x8gw5\") pod \"watcher-operator-controller-manager-75944c9b7-w4vkk\" (UID: \"3e2afd98-5854-4fd7-abe6-059174bf661e\") " pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.458393    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.458463    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvhwt\" (UniqueName: \"kubernetes.io/projected/1530ae38-d334-436e-9599-54f0caeaf3c4-kube-api-access-rvhwt\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.458642    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v"
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.458818    4669 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.458916    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert podName:b8b8c80e-24e4-40bc-9927-21ce8b6c2667 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:10.458898979 +0000 UTC m=+884.375845606 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f99k4v" (UID: "b8b8c80e-24e4-40bc-9927-21ce8b6c2667") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.459059    4669 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.459126    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:09.959103544 +0000 UTC m=+883.876050171 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "webhook-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.459165    4669 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.459192    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:09.959185186 +0000 UTC m=+883.876131803 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "metrics-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.463414    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.494900    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.581411    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8gw5\" (UniqueName: \"kubernetes.io/projected/3e2afd98-5854-4fd7-abe6-059174bf661e-kube-api-access-x8gw5\") pod \"watcher-operator-controller-manager-75944c9b7-w4vkk\" (UID: \"3e2afd98-5854-4fd7-abe6-059174bf661e\") " pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.582438    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvhwt\" (UniqueName: \"kubernetes.io/projected/1530ae38-d334-436e-9599-54f0caeaf3c4-kube-api-access-rvhwt\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.592840    4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.593809    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.600347    4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-zf4pr"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.603375    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.654873    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.695978    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.716419    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.739885    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.776889    4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jmll5\" (UniqueName: \"kubernetes.io/projected/02302af5-b29e-4346-9f30-70ec3d5f8b59-kube-api-access-jmll5\") pod \"rabbitmq-cluster-operator-manager-668c99d594-dg5lw\" (UID: \"02302af5-b29e-4346-9f30-70ec3d5f8b59\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.808768    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.813589    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.831203    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl"]
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.857421    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.869084    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.880647    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jmll5\" (UniqueName: \"kubernetes.io/projected/02302af5-b29e-4346-9f30-70ec3d5f8b59-kube-api-access-jmll5\") pod \"rabbitmq-cluster-operator-manager-668c99d594-dg5lw\" (UID: \"02302af5-b29e-4346-9f30-70ec3d5f8b59\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.925722    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.933755    4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jmll5\" (UniqueName: \"kubernetes.io/projected/02302af5-b29e-4346-9f30-70ec3d5f8b59-kube-api-access-jmll5\") pod \"rabbitmq-cluster-operator-manager-668c99d594-dg5lw\" (UID: \"02302af5-b29e-4346-9f30-70ec3d5f8b59\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.981978    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: I1210 15:35:09.982033    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.982951    4669 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.982992    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:10.982979057 +0000 UTC m=+884.899925684 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "metrics-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.983615    4669 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 10 15:35:09 crc kubenswrapper[4669]: E1210 15:35:09.983642    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:10.983634894 +0000 UTC m=+884.900581521 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "webhook-server-cert" not found
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:09.999787    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb"
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.015195    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk"
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.019028    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m"]
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.087370    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r"
Dec 10 15:35:10 crc kubenswrapper[4669]: E1210 15:35:10.088151    4669 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 10 15:35:10 crc kubenswrapper[4669]: E1210 15:35:10.088203    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert podName:cfb93e7d-25ad-468f-8b68-9b6b57676a5a nodeName:}" failed. No retries permitted until 2025-12-10 15:35:12.088185977 +0000 UTC m=+886.005132594 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert") pod "infra-operator-controller-manager-78d48bff9d-qq45r" (UID: "cfb93e7d-25ad-468f-8b68-9b6b57676a5a") : secret "infra-operator-webhook-server-cert" not found
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.209466    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t"]
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.234313    4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw"
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.250920    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6"]
Dec 10 15:35:10 crc kubenswrapper[4669]: W1210 15:35:10.275203    4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf0da37f3_9f8c_4d66_ba13_6c1da41ceba2.slice/crio-c2ab97dd462cba42f73e1d5192707ffbed4203cb3acefd4c6bc599ac6526eba7 WatchSource:0}: Error finding container c2ab97dd462cba42f73e1d5192707ffbed4203cb3acefd4c6bc599ac6526eba7: Status 404 returned error can't find the container with id c2ab97dd462cba42f73e1d5192707ffbed4203cb3acefd4c6bc599ac6526eba7
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.475904    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx"]
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.517527    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v"
Dec 10 15:35:10 crc kubenswrapper[4669]: E1210 15:35:10.517711    4669 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 10 15:35:10 crc kubenswrapper[4669]: E1210 15:35:10.517764    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert podName:b8b8c80e-24e4-40bc-9927-21ce8b6c2667 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:12.517751095 +0000 UTC m=+886.434697722 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f99k4v" (UID: "b8b8c80e-24e4-40bc-9927-21ce8b6c2667") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.617235    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn"]
Dec 10 15:35:10 crc kubenswrapper[4669]: W1210 15:35:10.622265    4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc24a6a06_852c_476f_bb86_03c1e2430a48.slice/crio-603e8b9c9e3ddf3b0aecb8c9618d5c86d89b6fb6f61f2fa2f51e38dbd5c0812d WatchSource:0}: Error finding container 603e8b9c9e3ddf3b0aecb8c9618d5c86d89b6fb6f61f2fa2f51e38dbd5c0812d: Status 404 returned error can't find the container with id 603e8b9c9e3ddf3b0aecb8c9618d5c86d89b6fb6f61f2fa2f51e38dbd5c0812d
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.695546    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-967d97867-sh22l"]
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.760684    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" event={"ID":"9b8ca892-98a2-4e46-816f-548631ceaf50","Type":"ContainerStarted","Data":"51b0f087a07776c127354a459cd70b508646c1b2542fc064a8c1b47b03ed9a6c"}
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.765505    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" event={"ID":"f0da37f3-9f8c-4d66-ba13-6c1da41ceba2","Type":"ContainerStarted","Data":"c2ab97dd462cba42f73e1d5192707ffbed4203cb3acefd4c6bc599ac6526eba7"}
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.766536    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" event={"ID":"332b9f2c-9474-4368-9d76-3e98561c2279","Type":"ContainerStarted","Data":"3d4235e57865b93bbd03978177a3307af23aa3b1c13fe5cbd2d87e4dc436f51a"}
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.767578    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" event={"ID":"edd21671-d820-4fb7-835e-97fd0ade3909","Type":"ContainerStarted","Data":"9487575f46708ad26d9e57651f2f65f8af676b0e2352b98334bdb2ca37995e98"}
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.767880    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn"]
Dec 10 15:35:10 crc kubenswrapper[4669]: W1210 15:35:10.769014    4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod460630fb_9db1_487a_af29_d92b820e0a1b.slice/crio-8899232569ec0022c267c0d5a4698174b976e8c962eb0141b92341a3bc4770ef WatchSource:0}: Error finding container 8899232569ec0022c267c0d5a4698174b976e8c962eb0141b92341a3bc4770ef: Status 404 returned error can't find the container with id 8899232569ec0022c267c0d5a4698174b976e8c962eb0141b92341a3bc4770ef
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.769375    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" event={"ID":"c24a6a06-852c-476f-bb86-03c1e2430a48","Type":"ContainerStarted","Data":"603e8b9c9e3ddf3b0aecb8c9618d5c86d89b6fb6f61f2fa2f51e38dbd5c0812d"}
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.771127    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" event={"ID":"4ecff5d1-1a76-4282-a11e-ee74b69e7450","Type":"ContainerStarted","Data":"a3be33a43422b5e3a735b9cdfd41ef82c1253e66fcd341b24e0c51d0c42a2a69"}
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.777609    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" event={"ID":"a260011c-3fcf-47cd-9472-20b180b4bd2f","Type":"ContainerStarted","Data":"3dd1cdbfc319bdff0ef9146f6ba8a96b097693102f260c84597119b939fe9858"}
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.781830    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" event={"ID":"ec748f3b-e193-43da-8d3b-c6d6169f58b5","Type":"ContainerStarted","Data":"61d5c3de24b47e5b465f6902d856c0e890b426b9ff3a0fd88f03618aac97cd76"}
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.837459    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-2x88f"]
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.853248    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s"]
Dec 10 15:35:10 crc kubenswrapper[4669]: I1210 15:35:10.858650    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8"]
Dec 10 15:35:10 crc kubenswrapper[4669]: W1210 15:35:10.862461    4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3daa730d_a51c_4330_8d36_712f27114f09.slice/crio-37c0412a888a6f39066f1121bd3508a772911e951377dea367e6da2fbc6a91de WatchSource:0}: Error finding container 37c0412a888a6f39066f1121bd3508a772911e951377dea367e6da2fbc6a91de: Status 404 returned error can't find the container with id 37c0412a888a6f39066f1121bd3508a772911e951377dea367e6da2fbc6a91de
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.021323    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5"]
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.024932    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.024973    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.025170    4669 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.025237    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:13.02520117 +0000 UTC m=+886.942147797 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "webhook-server-cert" not found
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.025265    4669 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.025333    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:13.025315003 +0000 UTC m=+886.942261630 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "metrics-server-cert" not found
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.068135    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk"]
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.079865    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp"]
Dec 10 15:35:11 crc kubenswrapper[4669]: W1210 15:35:11.105532    4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e2afd98_5854_4fd7_abe6_059174bf661e.slice/crio-05c16bacf6f5b34fd4c55deedf83e6ce60fb7c84419d918d6eb72ff3ad325d7e WatchSource:0}: Error finding container 05c16bacf6f5b34fd4c55deedf83e6ce60fb7c84419d918d6eb72ff3ad325d7e: Status 404 returned error can't find the container with id 05c16bacf6f5b34fd4c55deedf83e6ce60fb7c84419d918d6eb72ff3ad325d7e
Dec 10 15:35:11 crc kubenswrapper[4669]: W1210 15:35:11.105857    4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb169689d_8a97_407f_81f7_56497bc77f0b.slice/crio-10dbac8afef15398cb9b607c19bf933885f284f0be91ac10e91cfd0cfcaec18b WatchSource:0}: Error finding container 10dbac8afef15398cb9b607c19bf933885f284f0be91ac10e91cfd0cfcaec18b: Status 404 returned error can't find the container with id 10dbac8afef15398cb9b607c19bf933885f284f0be91ac10e91cfd0cfcaec18b
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.106010    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw"]
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.120253    4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q8fk5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-n8qx2_openstack-operators(b169689d-8a97-407f-81f7-56497bc77f0b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.120645    4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x8gw5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-75944c9b7-w4vkk_openstack-operators(3e2afd98-5854-4fd7-abe6-059174bf661e): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.121890    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2"]
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.122613    4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q8fk5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-n8qx2_openstack-operators(b169689d-8a97-407f-81f7-56497bc77f0b): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.123018    4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x8gw5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-75944c9b7-w4vkk_openstack-operators(3e2afd98-5854-4fd7-abe6-059174bf661e): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.124337    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk" podUID="3e2afd98-5854-4fd7-abe6-059174bf661e"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.124406    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" podUID="b169689d-8a97-407f-81f7-56497bc77f0b"
Dec 10 15:35:11 crc kubenswrapper[4669]: W1210 15:35:11.124998    4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0afaf438_c06e_45c6_a814_d032d7a43700.slice/crio-72017007c6a16c51634e92b5f24ad0cf85d74309c96fc976f53429131fbeda94 WatchSource:0}: Error finding container 72017007c6a16c51634e92b5f24ad0cf85d74309c96fc976f53429131fbeda94: Status 404 returned error can't find the container with id 72017007c6a16c51634e92b5f24ad0cf85d74309c96fc976f53429131fbeda94
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.129297    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb"]
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.206405    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8"]
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.213176    4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4"]
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.220971    4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tbg27,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-8cxm8_openstack-operators(d27eeff8-d1a3-4d08-a474-076b14194921): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.223953    4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tbg27,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-8cxm8_openstack-operators(d27eeff8-d1a3-4d08-a474-076b14194921): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.225036    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" podUID="d27eeff8-d1a3-4d08-a474-076b14194921"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.225921    4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ksp6h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-57gx4_openstack-operators(2db5775a-2728-4581-98c9-155056e55c21): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.227756    4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ksp6h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-58d5ff84df-57gx4_openstack-operators(2db5775a-2728-4581-98c9-155056e55c21): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.229418    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" podUID="2db5775a-2728-4581-98c9-155056e55c21"
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.789670    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw" event={"ID":"02302af5-b29e-4346-9f30-70ec3d5f8b59","Type":"ContainerStarted","Data":"34295b53118254a05a631f8eb079ed64e30a7632c2026ca949ff6f720936a673"}
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.791632    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" event={"ID":"5f83dee3-b4c5-4c8a-ba44-78d74195e59c","Type":"ContainerStarted","Data":"86d1497b87b57d4d957af6624d58f995f00dd2b35efe69c01b8591d7342d1f56"}
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.793077    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" event={"ID":"b169689d-8a97-407f-81f7-56497bc77f0b","Type":"ContainerStarted","Data":"10dbac8afef15398cb9b607c19bf933885f284f0be91ac10e91cfd0cfcaec18b"}
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.795556    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" event={"ID":"3daa730d-a51c-4330-8d36-712f27114f09","Type":"ContainerStarted","Data":"37c0412a888a6f39066f1121bd3508a772911e951377dea367e6da2fbc6a91de"}
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.797163    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" event={"ID":"2db5775a-2728-4581-98c9-155056e55c21","Type":"ContainerStarted","Data":"7a09f61190478c262b7408909ace0df84612662e1eb9084d1d3f4ecab4d4c5fe"}
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.799095    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" podUID="2db5775a-2728-4581-98c9-155056e55c21"
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.800526    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb" event={"ID":"0afaf438-c06e-45c6-a814-d032d7a43700","Type":"ContainerStarted","Data":"72017007c6a16c51634e92b5f24ad0cf85d74309c96fc976f53429131fbeda94"}
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.801897    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk" event={"ID":"3e2afd98-5854-4fd7-abe6-059174bf661e","Type":"ContainerStarted","Data":"05c16bacf6f5b34fd4c55deedf83e6ce60fb7c84419d918d6eb72ff3ad325d7e"}
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.803826    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5" event={"ID":"35c1b18e-dac3-46c5-8714-44b5f7cc3462","Type":"ContainerStarted","Data":"24793a443b8cfa1e045cc61c0796c68e68b06c834897d1647968c038f76acffd"}
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.804409    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk" podUID="3e2afd98-5854-4fd7-abe6-059174bf661e"
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.805838    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" event={"ID":"460630fb-9db1-487a-af29-d92b820e0a1b","Type":"ContainerStarted","Data":"8899232569ec0022c267c0d5a4698174b976e8c962eb0141b92341a3bc4770ef"}
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.810091    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" podUID="b169689d-8a97-407f-81f7-56497bc77f0b"
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.812589    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f" event={"ID":"812c4aed-2b51-4ae9-b36d-c3ac85d47d73","Type":"ContainerStarted","Data":"4c78fcf4ddfbbe7d3119ef63f754f416d4586f3bfd312ab53f59d0405ba24970"}
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.844500    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" event={"ID":"e68c43d5-161b-4ab1-9592-8a2d7f32f7eb","Type":"ContainerStarted","Data":"9d19bde9a98dcd3c87fc1c45a9a009172af9c023e5b01bb8c0bb535f390fbd34"}
Dec 10 15:35:11 crc kubenswrapper[4669]: I1210 15:35:11.856645    4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" event={"ID":"d27eeff8-d1a3-4d08-a474-076b14194921","Type":"ContainerStarted","Data":"532aad5c2b8966534acde1b2bad1e29c3c4dd44dc0dafc6a769ae23afa3ca18b"}
Dec 10 15:35:11 crc kubenswrapper[4669]: E1210 15:35:11.863031    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" podUID="d27eeff8-d1a3-4d08-a474-076b14194921"
Dec 10 15:35:12 crc kubenswrapper[4669]: I1210 15:35:12.145763    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r"
Dec 10 15:35:12 crc kubenswrapper[4669]: E1210 15:35:12.146090    4669 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 10 15:35:12 crc kubenswrapper[4669]: E1210 15:35:12.146140    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert podName:cfb93e7d-25ad-468f-8b68-9b6b57676a5a nodeName:}" failed. No retries permitted until 2025-12-10 15:35:16.146123944 +0000 UTC m=+890.063070571 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert") pod "infra-operator-controller-manager-78d48bff9d-qq45r" (UID: "cfb93e7d-25ad-468f-8b68-9b6b57676a5a") : secret "infra-operator-webhook-server-cert" not found
Dec 10 15:35:12 crc kubenswrapper[4669]: I1210 15:35:12.554594    4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v"
Dec 10 15:35:12 crc kubenswrapper[4669]: E1210 15:35:12.554765    4669 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 10 15:35:12 crc kubenswrapper[4669]: E1210 15:35:12.554845    4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert podName:b8b8c80e-24e4-40bc-9927-21ce8b6c2667 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:16.554825948 +0000 UTC m=+890.471772565 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f99k4v" (UID: "b8b8c80e-24e4-40bc-9927-21ce8b6c2667") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 10 15:35:12 crc kubenswrapper[4669]: E1210 15:35:12.873607    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" podUID="b169689d-8a97-407f-81f7-56497bc77f0b"
Dec 10 15:35:12 crc kubenswrapper[4669]: E1210 15:35:12.874473    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:961417d59f527d925ac48ff6a11de747d0493315e496e34dc83d76a1a1fff58a\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk" podUID="3e2afd98-5854-4fd7-abe6-059174bf661e"
Dec 10 15:35:12 crc kubenswrapper[4669]: E1210 15:35:12.874540    4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to
\"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" podUID="d27eeff8-d1a3-4d08-a474-076b14194921" Dec 10 15:35:12 crc kubenswrapper[4669]: E1210 15:35:12.897925 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:f27e732ec1faee765461bf137d9be81278b2fa39675019a73622755e1e610b6f\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" podUID="2db5775a-2728-4581-98c9-155056e55c21" Dec 10 15:35:13 crc kubenswrapper[4669]: E1210 15:35:13.062186 4669 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 15:35:13 crc kubenswrapper[4669]: I1210 15:35:13.061908 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:13 crc kubenswrapper[4669]: I1210 15:35:13.064002 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:13 crc kubenswrapper[4669]: E1210 15:35:13.064586 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:17.064569393 +0000 UTC m=+890.981516020 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "metrics-server-cert" not found Dec 10 15:35:13 crc kubenswrapper[4669]: E1210 15:35:13.065341 4669 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 15:35:13 crc kubenswrapper[4669]: E1210 15:35:13.065437 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:17.065414646 +0000 UTC m=+890.982361333 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "webhook-server-cert" not found Dec 10 15:35:16 crc kubenswrapper[4669]: I1210 15:35:16.218545 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:16 crc kubenswrapper[4669]: E1210 15:35:16.218730 4669 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 15:35:16 crc kubenswrapper[4669]: E1210 15:35:16.218904 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert podName:cfb93e7d-25ad-468f-8b68-9b6b57676a5a nodeName:}" failed. No retries permitted until 2025-12-10 15:35:24.218889204 +0000 UTC m=+898.135835831 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert") pod "infra-operator-controller-manager-78d48bff9d-qq45r" (UID: "cfb93e7d-25ad-468f-8b68-9b6b57676a5a") : secret "infra-operator-webhook-server-cert" not found Dec 10 15:35:16 crc kubenswrapper[4669]: I1210 15:35:16.624519 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:16 crc kubenswrapper[4669]: E1210 15:35:16.624748 4669 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 15:35:16 crc kubenswrapper[4669]: E1210 15:35:16.624797 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert podName:b8b8c80e-24e4-40bc-9927-21ce8b6c2667 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:24.624783465 +0000 UTC m=+898.541730082 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f99k4v" (UID: "b8b8c80e-24e4-40bc-9927-21ce8b6c2667") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 15:35:17 crc kubenswrapper[4669]: I1210 15:35:17.129648 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:17 crc kubenswrapper[4669]: I1210 15:35:17.129708 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:17 crc kubenswrapper[4669]: E1210 15:35:17.129889 4669 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 15:35:17 crc kubenswrapper[4669]: E1210 15:35:17.129948 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:25.12993151 +0000 UTC m=+899.046878137 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "webhook-server-cert" not found Dec 10 15:35:17 crc kubenswrapper[4669]: E1210 15:35:17.129889 4669 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 15:35:17 crc kubenswrapper[4669]: E1210 15:35:17.130016 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:25.130001742 +0000 UTC m=+899.046948369 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "metrics-server-cert" not found Dec 10 15:35:24 crc kubenswrapper[4669]: I1210 15:35:24.236597 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:24 crc kubenswrapper[4669]: E1210 15:35:24.236902 4669 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 10 15:35:24 crc kubenswrapper[4669]: E1210 15:35:24.237377 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert podName:cfb93e7d-25ad-468f-8b68-9b6b57676a5a nodeName:}" failed. No retries permitted until 2025-12-10 15:35:40.237334963 +0000 UTC m=+914.154281630 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert") pod "infra-operator-controller-manager-78d48bff9d-qq45r" (UID: "cfb93e7d-25ad-468f-8b68-9b6b57676a5a") : secret "infra-operator-webhook-server-cert" not found Dec 10 15:35:24 crc kubenswrapper[4669]: I1210 15:35:24.644726 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:24 crc kubenswrapper[4669]: E1210 15:35:24.644851 4669 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 15:35:24 crc kubenswrapper[4669]: E1210 15:35:24.644908 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert podName:b8b8c80e-24e4-40bc-9927-21ce8b6c2667 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:40.644891187 +0000 UTC m=+914.561837814 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert") pod "openstack-baremetal-operator-controller-manager-84b575879f99k4v" (UID: "b8b8c80e-24e4-40bc-9927-21ce8b6c2667") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 10 15:35:25 crc kubenswrapper[4669]: I1210 15:35:25.152974 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:25 crc kubenswrapper[4669]: I1210 15:35:25.153057 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:25 crc kubenswrapper[4669]: E1210 15:35:25.153256 4669 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 10 15:35:25 crc kubenswrapper[4669]: E1210 15:35:25.153340 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:41.153318419 +0000 UTC m=+915.070265056 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "webhook-server-cert" not found Dec 10 15:35:25 crc kubenswrapper[4669]: E1210 15:35:25.153255 4669 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 10 15:35:25 crc kubenswrapper[4669]: E1210 15:35:25.153447 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs podName:1530ae38-d334-436e-9599-54f0caeaf3c4 nodeName:}" failed. No retries permitted until 2025-12-10 15:35:41.153416481 +0000 UTC m=+915.070363138 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs") pod "openstack-operator-controller-manager-565cd4c864-2bxld" (UID: "1530ae38-d334-436e-9599-54f0caeaf3c4") : secret "metrics-server-cert" not found Dec 10 15:35:26 crc kubenswrapper[4669]: E1210 15:35:26.607019 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 10 15:35:26 crc kubenswrapper[4669]: E1210 15:35:26.607468 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jxqdm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-6ln9s_openstack-operators(5f83dee3-b4c5-4c8a-ba44-78d74195e59c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:27 crc kubenswrapper[4669]: E1210 15:35:27.202614 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:981b6a8f95934a86c5f10ef6e198b07265aeba7f11cf84b9ccd13dfaf06f3ca3" Dec 10 15:35:27 crc 
kubenswrapper[4669]: E1210 15:35:27.203368 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:981b6a8f95934a86c5f10ef6e198b07265aeba7f11cf84b9ccd13dfaf06f3ca3,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mftgr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-6c677c69b-d6c6m_openstack-operators(4ecff5d1-1a76-4282-a11e-ee74b69e7450): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:30 crc kubenswrapper[4669]: E1210 15:35:30.364280 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 10 15:35:30 crc kubenswrapper[4669]: E1210 15:35:30.364850 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zv79m,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-cgdhp_openstack-operators(e68c43d5-161b-4ab1-9592-8a2d7f32f7eb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:31 crc kubenswrapper[4669]: E1210 15:35:31.185293 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 10 15:35:31 crc kubenswrapper[4669]: E1210 15:35:31.185684 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-s8zsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-2x88f_openstack-operators(812c4aed-2b51-4ae9-b36d-c3ac85d47d73): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:35 crc kubenswrapper[4669]: E1210 15:35:35.664750 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 10 15:35:35 crc kubenswrapper[4669]: E1210 15:35:35.665244 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lf2r5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-dsxdb_openstack-operators(0afaf438-c06e-45c6-a814-d032d7a43700): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:36 crc kubenswrapper[4669]: E1210 15:35:36.110494 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 10 15:35:36 crc kubenswrapper[4669]: E1210 15:35:36.110973 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jmll5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-dg5lw_openstack-operators(02302af5-b29e-4346-9f30-70ec3d5f8b59): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:36 crc kubenswrapper[4669]: E1210 15:35:36.112486 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw" podUID="02302af5-b29e-4346-9f30-70ec3d5f8b59" Dec 10 15:35:37 crc kubenswrapper[4669]: E1210 15:35:37.033536 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw" podUID="02302af5-b29e-4346-9f30-70ec3d5f8b59" Dec 10 15:35:38 crc kubenswrapper[4669]: E1210 15:35:38.340853 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:5370dc4a8e776923eec00bb50cbdb2e390e9dde50be26bdc04a216bd2d6b5027" Dec 10 15:35:38 crc kubenswrapper[4669]: E1210 15:35:38.341053 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:5370dc4a8e776923eec00bb50cbdb2e390e9dde50be26bdc04a216bd2d6b5027,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ks7g5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-5697bb5779-vzhnx_openstack-operators(9b8ca892-98a2-4e46-816f-548631ceaf50): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:39 crc kubenswrapper[4669]: E1210 15:35:39.150974 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 10 15:35:39 crc kubenswrapper[4669]: E1210 15:35:39.151317 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hmm9l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-lhblg_openstack-operators(a260011c-3fcf-47cd-9472-20b180b4bd2f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:39 crc kubenswrapper[4669]: E1210 15:35:39.665647 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad" Dec 10 15:35:39 crc kubenswrapper[4669]: E1210 15:35:39.665886 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:424da951f13f1fbe9083215dc9f5088f90676dd813f01fdf3c1a8639b61cbaad,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6kv4f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-79c8c4686c-866l8_openstack-operators(3daa730d-a51c-4330-8d36-712f27114f09): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:40 crc kubenswrapper[4669]: I1210 15:35:40.280545 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:40 crc kubenswrapper[4669]: I1210 15:35:40.298367 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/cfb93e7d-25ad-468f-8b68-9b6b57676a5a-cert\") pod \"infra-operator-controller-manager-78d48bff9d-qq45r\" (UID: \"cfb93e7d-25ad-468f-8b68-9b6b57676a5a\") " pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:40 crc kubenswrapper[4669]: E1210 15:35:40.364769 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 10 15:35:40 crc kubenswrapper[4669]: E1210 15:35:40.365260 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tgv5g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-t7wdn_openstack-operators(460630fb-9db1-487a-af29-d92b820e0a1b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:40 crc kubenswrapper[4669]: I1210 15:35:40.442143 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-4fk6w" Dec 10 15:35:40 crc kubenswrapper[4669]: I1210 15:35:40.450789 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:40 crc kubenswrapper[4669]: I1210 15:35:40.687075 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:40 crc kubenswrapper[4669]: I1210 15:35:40.692308 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b8b8c80e-24e4-40bc-9927-21ce8b6c2667-cert\") pod \"openstack-baremetal-operator-controller-manager-84b575879f99k4v\" (UID: \"b8b8c80e-24e4-40bc-9927-21ce8b6c2667\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:40 crc kubenswrapper[4669]: I1210 15:35:40.861512 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-2s44z" Dec 10 15:35:40 crc kubenswrapper[4669]: I1210 15:35:40.870455 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:40 crc kubenswrapper[4669]: E1210 15:35:40.944369 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991" Dec 10 15:35:40 crc kubenswrapper[4669]: E1210 15:35:40.944576 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-q8fk5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9d58d64bc-n8qx2_openstack-operators(b169689d-8a97-407f-81f7-56497bc77f0b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:41 crc kubenswrapper[4669]: I1210 15:35:41.194430 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:41 crc kubenswrapper[4669]: I1210 15:35:41.194498 4669 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:41 crc kubenswrapper[4669]: I1210 15:35:41.198632 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-metrics-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:41 crc kubenswrapper[4669]: I1210 15:35:41.204135 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/1530ae38-d334-436e-9599-54f0caeaf3c4-webhook-certs\") pod \"openstack-operator-controller-manager-565cd4c864-2bxld\" (UID: \"1530ae38-d334-436e-9599-54f0caeaf3c4\") " pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:41 crc kubenswrapper[4669]: I1210 15:35:41.351737 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-hgkgr" Dec 10 15:35:41 crc kubenswrapper[4669]: I1210 15:35:41.359675 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:41 crc kubenswrapper[4669]: E1210 15:35:41.602125 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 10 15:35:41 crc kubenswrapper[4669]: E1210 15:35:41.602383 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-klzng,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-fkxpl_openstack-operators(edd21671-d820-4fb7-835e-97fd0ade3909): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:43 crc kubenswrapper[4669]: E1210 15:35:43.526146 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 10 15:35:43 crc kubenswrapper[4669]: E1210 15:35:43.526648 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-tbg27,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-8cxm8_openstack-operators(d27eeff8-d1a3-4d08-a474-076b14194921): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:35:44 crc kubenswrapper[4669]: I1210 15:35:44.180477 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld"] Dec 10 15:35:44 crc kubenswrapper[4669]: W1210 15:35:44.258456 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1530ae38_d334_436e_9599_54f0caeaf3c4.slice/crio-3efb6870522f52d51af17ec42a7317f496ad128555659c67acec2537619f9201 WatchSource:0}: Error finding container 3efb6870522f52d51af17ec42a7317f496ad128555659c67acec2537619f9201: Status 404 returned error can't find the container with id 3efb6870522f52d51af17ec42a7317f496ad128555659c67acec2537619f9201 Dec 10 15:35:44 crc kubenswrapper[4669]: I1210 15:35:44.690511 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v"] Dec 10 15:35:44 crc kubenswrapper[4669]: I1210 15:35:44.765829 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r"] Dec 10 15:35:44 crc kubenswrapper[4669]: W1210 15:35:44.875642 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfb93e7d_25ad_468f_8b68_9b6b57676a5a.slice/crio-fa49beeb2cf25cec9c90e88ac2ba60dbe9b779300e331fbbead340e5ccae5901 WatchSource:0}: Error finding container fa49beeb2cf25cec9c90e88ac2ba60dbe9b779300e331fbbead340e5ccae5901: Status 404 returned error can't find the container with id fa49beeb2cf25cec9c90e88ac2ba60dbe9b779300e331fbbead340e5ccae5901 Dec 10 15:35:45 crc kubenswrapper[4669]: I1210 15:35:45.098130 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" event={"ID":"1530ae38-d334-436e-9599-54f0caeaf3c4","Type":"ContainerStarted","Data":"3efb6870522f52d51af17ec42a7317f496ad128555659c67acec2537619f9201"} Dec 10 15:35:45 crc kubenswrapper[4669]: I1210 15:35:45.103529 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" event={"ID":"b8b8c80e-24e4-40bc-9927-21ce8b6c2667","Type":"ContainerStarted","Data":"dc9d850e82648e4e64ec43da32a29a09adb5af5989ad9f69aff8b176ec795eb7"} Dec 10 15:35:45 crc kubenswrapper[4669]: I1210 15:35:45.105831 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" event={"ID":"cfb93e7d-25ad-468f-8b68-9b6b57676a5a","Type":"ContainerStarted","Data":"fa49beeb2cf25cec9c90e88ac2ba60dbe9b779300e331fbbead340e5ccae5901"} Dec 10 15:35:45 crc kubenswrapper[4669]: I1210 15:35:45.107258 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" event={"ID":"f0da37f3-9f8c-4d66-ba13-6c1da41ceba2","Type":"ContainerStarted","Data":"37ac20668fcebbee8f3643fb91bc34666f7f841b782ddb49951b14b83b9769e3"} Dec 10 15:35:46 crc kubenswrapper[4669]: I1210 15:35:46.121042 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5" event={"ID":"35c1b18e-dac3-46c5-8714-44b5f7cc3462","Type":"ContainerStarted","Data":"93cee58f423732262293a8b7a045683c0b4f99c6c3d14f8addcdb1be49f718a6"} Dec 10 15:35:46 crc kubenswrapper[4669]: I1210 15:35:46.122693 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" event={"ID":"332b9f2c-9474-4368-9d76-3e98561c2279","Type":"ContainerStarted","Data":"22fd848d5f04ad5c49b98ce4dcaaf1a6edad89da11027e8f426fc6157f8b39a8"} Dec 10 15:35:46 crc kubenswrapper[4669]: I1210 15:35:46.124755 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" event={"ID":"c24a6a06-852c-476f-bb86-03c1e2430a48","Type":"ContainerStarted","Data":"f7722e1a355d0ee2967a4cf4f25c91bcce1b98641907cc4fffb6c108089ae94f"} Dec 10 15:35:46 crc kubenswrapper[4669]: I1210 15:35:46.125822 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" event={"ID":"ec748f3b-e193-43da-8d3b-c6d6169f58b5","Type":"ContainerStarted","Data":"26f6af8059990f1201a16894b17d8f7cc1ed9176538c4dce9da859b6fb314d62"} Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.420267 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-lfxdv"] Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.421943 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.442551 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lfxdv"] Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.525780 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njb9d\" (UniqueName: \"kubernetes.io/projected/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-kube-api-access-njb9d\") pod \"certified-operators-lfxdv\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.525890 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-utilities\") pod \"certified-operators-lfxdv\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.525940 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-catalog-content\") pod \"certified-operators-lfxdv\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.628018 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-catalog-content\") pod \"certified-operators-lfxdv\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.628136 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njb9d\" (UniqueName: \"kubernetes.io/projected/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-kube-api-access-njb9d\") pod \"certified-operators-lfxdv\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.628240 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-utilities\") pod \"certified-operators-lfxdv\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.628896 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-catalog-content\") pod \"certified-operators-lfxdv\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.628974 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-utilities\") pod \"certified-operators-lfxdv\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.655725 4669 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-njb9d\" (UniqueName: \"kubernetes.io/projected/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-kube-api-access-njb9d\") pod \"certified-operators-lfxdv\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:47 crc kubenswrapper[4669]: I1210 15:35:47.739513 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:35:48 crc kubenswrapper[4669]: I1210 15:35:48.141133 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" event={"ID":"2db5775a-2728-4581-98c9-155056e55c21","Type":"ContainerStarted","Data":"8c89aa480696d8de6ccc76715d411c21019fec7bcc3b2b80abdad4d3d91f06ec"} Dec 10 15:35:48 crc kubenswrapper[4669]: I1210 15:35:48.145484 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk" event={"ID":"3e2afd98-5854-4fd7-abe6-059174bf661e","Type":"ContainerStarted","Data":"f9ba4f1cf5b0460ac41c8510a4909755c5fd3f32f09321634756cdd1696fd04f"} Dec 10 15:35:48 crc kubenswrapper[4669]: I1210 15:35:48.148487 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" event={"ID":"1530ae38-d334-436e-9599-54f0caeaf3c4","Type":"ContainerStarted","Data":"71d3972c873ca571db56d1efa8f02bac63736d0fd19da9c19bf20a69de239930"} Dec 10 15:35:48 crc kubenswrapper[4669]: I1210 15:35:48.149350 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:35:48 crc kubenswrapper[4669]: I1210 15:35:48.185631 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" podStartSLOduration=39.18561446 podStartE2EDuration="39.18561446s" podCreationTimestamp="2025-12-10 15:35:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:35:48.179914217 +0000 UTC m=+922.096860834" watchObservedRunningTime="2025-12-10 15:35:48.18561446 +0000 UTC m=+922.102561087" Dec 10 15:35:50 crc kubenswrapper[4669]: I1210 15:35:50.611899 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-lfxdv"] Dec 10 15:35:51 crc kubenswrapper[4669]: E1210 15:35:51.424129 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" podUID="e68c43d5-161b-4ab1-9592-8a2d7f32f7eb" Dec 10 15:35:51 crc kubenswrapper[4669]: E1210 15:35:51.428341 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" podUID="d27eeff8-d1a3-4d08-a474-076b14194921" Dec 10 15:35:51 crc kubenswrapper[4669]: E1210 15:35:51.431671 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context 
canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f" podUID="812c4aed-2b51-4ae9-b36d-c3ac85d47d73" Dec 10 15:35:52 crc kubenswrapper[4669]: I1210 15:35:52.178446 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" event={"ID":"d27eeff8-d1a3-4d08-a474-076b14194921","Type":"ContainerStarted","Data":"1040469bf745d9f77f8ebae7232a8a3969a2e3b96d89f2661dc98538ba8b6c56"} Dec 10 15:35:52 crc kubenswrapper[4669]: I1210 15:35:52.179819 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfxdv" event={"ID":"8ca3c1f5-20d1-4402-8fe8-08695e534f2e","Type":"ContainerStarted","Data":"efd7bfeb5610bd9b7ed7cb596e4451a772f3c7cff574df664cad92985c1945b9"} Dec 10 15:35:52 crc kubenswrapper[4669]: E1210 15:35:52.182013 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" podUID="d27eeff8-d1a3-4d08-a474-076b14194921" Dec 10 15:35:52 crc kubenswrapper[4669]: I1210 15:35:52.184713 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" event={"ID":"f0da37f3-9f8c-4d66-ba13-6c1da41ceba2","Type":"ContainerStarted","Data":"7a89b9bc209c9b263a1d19518f581a47e0e168502241a3cca1e27dce5fbe9b22"} Dec 10 15:35:52 crc kubenswrapper[4669]: I1210 15:35:52.185436 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" Dec 10 15:35:52 crc kubenswrapper[4669]: I1210 15:35:52.187574 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f" event={"ID":"812c4aed-2b51-4ae9-b36d-c3ac85d47d73","Type":"ContainerStarted","Data":"99b0e7b08db5498dc7d70e9129c488883adabb272e6b9f47c3f1a6b0b69b62d9"} Dec 10 15:35:52 crc kubenswrapper[4669]: I1210 15:35:52.189241 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" Dec 10 15:35:52 crc kubenswrapper[4669]: I1210 15:35:52.203142 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" event={"ID":"e68c43d5-161b-4ab1-9592-8a2d7f32f7eb","Type":"ContainerStarted","Data":"a82b824ce996ed14748ad738ffa648e68ea171e5bd6969482d626815f483b28f"} Dec 10 15:35:52 crc kubenswrapper[4669]: I1210 15:35:52.242008 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-697fb699cf-47j5t" podStartSLOduration=4.112161977 podStartE2EDuration="44.241989852s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.313246799 +0000 UTC m=+884.230193426" lastFinishedPulling="2025-12-10 15:35:50.443074674 +0000 UTC m=+924.360021301" observedRunningTime="2025-12-10 15:35:52.234114163 +0000 UTC m=+926.151060790" watchObservedRunningTime="2025-12-10 15:35:52.241989852 +0000 UTC m=+926.158936479" Dec 10 15:35:52 crc kubenswrapper[4669]: E1210 15:35:52.725277 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" podUID="4ecff5d1-1a76-4282-a11e-ee74b69e7450" Dec 10 15:35:53 crc kubenswrapper[4669]: E1210 15:35:53.148616 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" podUID="9b8ca892-98a2-4e46-816f-548631ceaf50" Dec 10 15:35:53 crc kubenswrapper[4669]: E1210 15:35:53.227075 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" podUID="a260011c-3fcf-47cd-9472-20b180b4bd2f" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.227881 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" event={"ID":"4ecff5d1-1a76-4282-a11e-ee74b69e7450","Type":"ContainerStarted","Data":"0730d045e8347ec6f62704ece95cffaad783135c87c7027ae27987c1013383bb"} Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.243405 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" event={"ID":"b8b8c80e-24e4-40bc-9927-21ce8b6c2667","Type":"ContainerStarted","Data":"281bba3a2eb6dfefd691f9d0d5c5e7f51582402a0cd8623dec6c08604b521968"} Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.277670 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" event={"ID":"ec748f3b-e193-43da-8d3b-c6d6169f58b5","Type":"ContainerStarted","Data":"ac02d8aa81f69f65eeb6c2984b6964af61a7bb273c60c950e0c3b5f4a12fddae"} Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.278500 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.286761 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.303467 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5" event={"ID":"35c1b18e-dac3-46c5-8714-44b5f7cc3462","Type":"ContainerStarted","Data":"c414a44d7aee2a9c633fc2f3e6fcf77ba5f5b194d6991eab4392dabc660822b3"} Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.303505 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.316111 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.317800 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" 
event={"ID":"332b9f2c-9474-4368-9d76-3e98561c2279","Type":"ContainerStarted","Data":"a15a1c718a25f86aa36c9da3386d9b2fe73b04f91746652ed056816f0d7a7c7c"} Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.318646 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.322791 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" event={"ID":"c24a6a06-852c-476f-bb86-03c1e2430a48","Type":"ContainerStarted","Data":"9bdd108173e604020ba6e79c6161490d1bb093a35c4091527451e3ed10a318c8"} Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.323588 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.325988 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.326871 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.338249 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" event={"ID":"9b8ca892-98a2-4e46-816f-548631ceaf50","Type":"ContainerStarted","Data":"646ac726fe172dac4d87faf4b094497d5f536dbf12f3929a217adac4b3006354"} Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.345960 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-5zbv5" podStartSLOduration=3.837395409 podStartE2EDuration="45.345944159s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:11.099773923 +0000 UTC m=+885.016720540" lastFinishedPulling="2025-12-10 15:35:52.608322663 +0000 UTC m=+926.525269290" observedRunningTime="2025-12-10 15:35:53.342254026 +0000 UTC m=+927.259200663" watchObservedRunningTime="2025-12-10 15:35:53.345944159 +0000 UTC m=+927.262890786" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.349142 4669 generic.go:334] "Generic (PLEG): container finished" podID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerID="886f62d20b02bc39c8ff08953a107dd8cc2557ac340f7f3cbe2ab2e975ea268b" exitCode=0 Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.349399 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfxdv" event={"ID":"8ca3c1f5-20d1-4402-8fe8-08695e534f2e","Type":"ContainerDied","Data":"886f62d20b02bc39c8ff08953a107dd8cc2557ac340f7f3cbe2ab2e975ea268b"} Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.368488 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-gkcl6" podStartSLOduration=4.230737435 podStartE2EDuration="45.368471797s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.310559809 +0000 UTC m=+884.227506436" lastFinishedPulling="2025-12-10 15:35:51.448294171 +0000 UTC m=+925.365240798" observedRunningTime="2025-12-10 15:35:53.367609356 +0000 UTC m=+927.284555983" watchObservedRunningTime="2025-12-10 15:35:53.368471797 
+0000 UTC m=+927.285418424" Dec 10 15:35:53 crc kubenswrapper[4669]: E1210 15:35:53.448071 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" podUID="3daa730d-a51c-4330-8d36-712f27114f09" Dec 10 15:35:53 crc kubenswrapper[4669]: E1210 15:35:53.449427 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" podUID="b169689d-8a97-407f-81f7-56497bc77f0b" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.458544 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-5b5fd79c9c-c9chn" podStartSLOduration=3.500743263 podStartE2EDuration="45.458530329s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.630191423 +0000 UTC m=+884.547138050" lastFinishedPulling="2025-12-10 15:35:52.587978479 +0000 UTC m=+926.504925116" observedRunningTime="2025-12-10 15:35:53.45462409 +0000 UTC m=+927.371570717" watchObservedRunningTime="2025-12-10 15:35:53.458530329 +0000 UTC m=+927.375476956" Dec 10 15:35:53 crc kubenswrapper[4669]: I1210 15:35:53.508544 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-967d97867-sh22l" podStartSLOduration=3.617176134 podStartE2EDuration="45.508525671s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.707066225 +0000 UTC m=+884.624012852" lastFinishedPulling="2025-12-10 15:35:52.598415762 +0000 UTC m=+926.515362389" observedRunningTime="2025-12-10 15:35:53.481515578 +0000 UTC m=+927.398462205" watchObservedRunningTime="2025-12-10 15:35:53.508525671 +0000 UTC m=+927.425472298" Dec 10 15:35:53 crc kubenswrapper[4669]: E1210 15:35:53.630586 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb" podUID="0afaf438-c06e-45c6-a814-d032d7a43700" Dec 10 15:35:53 crc kubenswrapper[4669]: E1210 15:35:53.665369 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" podUID="edd21671-d820-4fb7-835e-97fd0ade3909" Dec 10 15:35:54 crc kubenswrapper[4669]: I1210 15:35:54.355719 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" event={"ID":"edd21671-d820-4fb7-835e-97fd0ade3909","Type":"ContainerStarted","Data":"0e1897ea6b38d44affea19531541b4cf16001b468d4329f779a1e407fbb51a6a"} Dec 10 15:35:54 crc kubenswrapper[4669]: I1210 15:35:54.357876 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" 
event={"ID":"b169689d-8a97-407f-81f7-56497bc77f0b","Type":"ContainerStarted","Data":"832af2be7c0f5cc3b1f9ddfc1ea16fd3b18f04ca7966e99cb998e89ffb0a12ab"} Dec 10 15:35:54 crc kubenswrapper[4669]: E1210 15:35:54.359291 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:3aa109bb973253ae9dcf339b9b65abbd1176cdb4be672c93e538a5f113816991\\\"\"" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" podUID="b169689d-8a97-407f-81f7-56497bc77f0b" Dec 10 15:35:54 crc kubenswrapper[4669]: I1210 15:35:54.360201 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" event={"ID":"3daa730d-a51c-4330-8d36-712f27114f09","Type":"ContainerStarted","Data":"1b678d06de197b0047495ee5bf5eda8e4b9eead3df2ac70caeae23d7c4bc83e2"} Dec 10 15:35:54 crc kubenswrapper[4669]: I1210 15:35:54.361538 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" event={"ID":"a260011c-3fcf-47cd-9472-20b180b4bd2f","Type":"ContainerStarted","Data":"971218e27daaca25459f1fb5b813292bfaf06d4062f772c2ce342fee80dfcedf"} Dec 10 15:35:54 crc kubenswrapper[4669]: I1210 15:35:54.364317 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb" event={"ID":"0afaf438-c06e-45c6-a814-d032d7a43700","Type":"ContainerStarted","Data":"589b53945f4e54007bea9149e7f7883ac7d5a1e1f964f835a887b445d0df98d9"} Dec 10 15:35:54 crc kubenswrapper[4669]: I1210 15:35:54.367758 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk" event={"ID":"3e2afd98-5854-4fd7-abe6-059174bf661e","Type":"ContainerStarted","Data":"59e2c2b90b67aa364cb1413f9af0b48f19a734ca1253d759b193dacac19ebcfe"} Dec 10 15:35:54 crc kubenswrapper[4669]: I1210 15:35:54.496298 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk" podStartSLOduration=5.008433145 podStartE2EDuration="46.496277206s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:11.12041568 +0000 UTC m=+885.037362307" lastFinishedPulling="2025-12-10 15:35:52.608259741 +0000 UTC m=+926.525206368" observedRunningTime="2025-12-10 15:35:54.493425294 +0000 UTC m=+928.410371931" watchObservedRunningTime="2025-12-10 15:35:54.496277206 +0000 UTC m=+928.413223833" Dec 10 15:35:55 crc kubenswrapper[4669]: E1210 15:35:55.229557 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" podUID="460630fb-9db1-487a-af29-d92b820e0a1b" Dec 10 15:35:55 crc kubenswrapper[4669]: E1210 15:35:55.264609 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" podUID="5f83dee3-b4c5-4c8a-ba44-78d74195e59c" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.375649 4669 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" event={"ID":"460630fb-9db1-487a-af29-d92b820e0a1b","Type":"ContainerStarted","Data":"6daa9e3741df5e7638e2268aa52118329a0ae47f288b4866b6241aaf72490404"} Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.377192 4669 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.380112 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" event={"ID":"5f83dee3-b4c5-4c8a-ba44-78d74195e59c","Type":"ContainerStarted","Data":"1d37dc6fc695a706dda55c602ea39c5b321fe4c2a62f0e4917535ed840b80c06"} Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.388258 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f" event={"ID":"812c4aed-2b51-4ae9-b36d-c3ac85d47d73","Type":"ContainerStarted","Data":"d54ec974c23ca290fcd4a4f9f75a8c4969d7ae2088193dcc95da0a4b2feb90bf"} Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.388991 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.396395 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" event={"ID":"e68c43d5-161b-4ab1-9592-8a2d7f32f7eb","Type":"ContainerStarted","Data":"51588a7ea1974b5c2e2cdc518154b5441c05b588b42e1880a7bf3f2ce0953466"} Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.396453 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.404855 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" event={"ID":"2db5775a-2728-4581-98c9-155056e55c21","Type":"ContainerStarted","Data":"5ad8ef625c93ecd4dd33c87ffb4bf157b50758264c970ac4f0416393b81fea45"} Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.405759 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.412646 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.425422 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" event={"ID":"b8b8c80e-24e4-40bc-9927-21ce8b6c2667","Type":"ContainerStarted","Data":"a9a04008e511ee8f9039f0655e7e0b4875bd8a2dd9040742fbf244472c2d8132"} Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.426122 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.433102 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw" 
event={"ID":"02302af5-b29e-4346-9f30-70ec3d5f8b59","Type":"ContainerStarted","Data":"79107c8db4072325f282081108b542f0e61288d9bb89d1bcb3cdb206e696853c"} Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.439984 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" event={"ID":"cfb93e7d-25ad-468f-8b68-9b6b57676a5a","Type":"ContainerStarted","Data":"7558bbee663b49debd9c9b647ace91d6375416ff57d52565a2e3b65f23e6a9c2"} Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.442115 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.457959 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-75944c9b7-w4vkk" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.579880 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-58d5ff84df-57gx4" podStartSLOduration=5.909255206 podStartE2EDuration="47.57986467s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:11.225793915 +0000 UTC m=+885.142740542" lastFinishedPulling="2025-12-10 15:35:52.896403379 +0000 UTC m=+926.813350006" observedRunningTime="2025-12-10 15:35:55.490547406 +0000 UTC m=+929.407494023" watchObservedRunningTime="2025-12-10 15:35:55.57986467 +0000 UTC m=+929.496811297" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.626241 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f" podStartSLOduration=5.31438375 podStartE2EDuration="47.626224639s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.846448996 +0000 UTC m=+884.763395623" lastFinishedPulling="2025-12-10 15:35:53.158289885 +0000 UTC m=+927.075236512" observedRunningTime="2025-12-10 15:35:55.622984408 +0000 UTC m=+929.539931035" watchObservedRunningTime="2025-12-10 15:35:55.626224639 +0000 UTC m=+929.543171266" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.630911 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dg5lw" podStartSLOduration=4.811685816 podStartE2EDuration="46.629382488s" podCreationTimestamp="2025-12-10 15:35:09 +0000 UTC" firstStartedPulling="2025-12-10 15:35:11.119097616 +0000 UTC m=+885.036044243" lastFinishedPulling="2025-12-10 15:35:52.936794288 +0000 UTC m=+926.853740915" observedRunningTime="2025-12-10 15:35:55.585386239 +0000 UTC m=+929.502332866" watchObservedRunningTime="2025-12-10 15:35:55.629382488 +0000 UTC m=+929.546329115" Dec 10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.661849 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" podStartSLOduration=5.072328311 podStartE2EDuration="47.661834987s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:11.039806121 +0000 UTC m=+884.956752748" lastFinishedPulling="2025-12-10 15:35:53.629312797 +0000 UTC m=+927.546259424" observedRunningTime="2025-12-10 15:35:55.660535455 +0000 UTC m=+929.577482082" watchObservedRunningTime="2025-12-10 15:35:55.661834987 +0000 UTC m=+929.578781614" Dec 
10 15:35:55 crc kubenswrapper[4669]: I1210 15:35:55.752210 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" podStartSLOduration=41.341454206 podStartE2EDuration="47.752190947s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:44.87819205 +0000 UTC m=+918.795138677" lastFinishedPulling="2025-12-10 15:35:51.288928791 +0000 UTC m=+925.205875418" observedRunningTime="2025-12-10 15:35:55.747682773 +0000 UTC m=+929.664629400" watchObservedRunningTime="2025-12-10 15:35:55.752190947 +0000 UTC m=+929.669137574" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.468410 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" event={"ID":"9b8ca892-98a2-4e46-816f-548631ceaf50","Type":"ContainerStarted","Data":"b499f7417e68cefd83170ac329abccf634a254e813e3e79e300fab8affe05c5b"} Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.469244 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.472454 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" event={"ID":"3daa730d-a51c-4330-8d36-712f27114f09","Type":"ContainerStarted","Data":"82e3f2ac0f1043d282ae72e3d2aa82dd2c08c35d7befaff1ae7aebe390c548ed"} Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.473085 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.478498 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" event={"ID":"a260011c-3fcf-47cd-9472-20b180b4bd2f","Type":"ContainerStarted","Data":"d8ce5767e13f3f8f60bba04da1cb2976336b37a1cd92c5032f25e2f5d8a6bba1"} Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.479206 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.490479 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb" event={"ID":"0afaf438-c06e-45c6-a814-d032d7a43700","Type":"ContainerStarted","Data":"0ccc969d8fd35c937ed9047ebc49688cb334e5dd52534e435b54ac8803667811"} Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.491208 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.501309 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" podStartSLOduration=3.610653526 podStartE2EDuration="48.501292283s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.509795887 +0000 UTC m=+884.426742514" lastFinishedPulling="2025-12-10 15:35:55.400434644 +0000 UTC m=+929.317381271" observedRunningTime="2025-12-10 15:35:56.498893552 +0000 UTC m=+930.415840179" watchObservedRunningTime="2025-12-10 15:35:56.501292283 +0000 UTC m=+930.418238900" 
Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.508302 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" event={"ID":"cfb93e7d-25ad-468f-8b68-9b6b57676a5a","Type":"ContainerStarted","Data":"433ae98a386f14c4b8a983bdc4150a3c7b0e2537d19ecb1b0a12f270a77a236a"} Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.509039 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.518584 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" podStartSLOduration=4.1448711639999996 podStartE2EDuration="49.518563528s" podCreationTimestamp="2025-12-10 15:35:07 +0000 UTC" firstStartedPulling="2025-12-10 15:35:09.942324429 +0000 UTC m=+883.859271056" lastFinishedPulling="2025-12-10 15:35:55.316016793 +0000 UTC m=+929.232963420" observedRunningTime="2025-12-10 15:35:56.513357867 +0000 UTC m=+930.430304494" watchObservedRunningTime="2025-12-10 15:35:56.518563528 +0000 UTC m=+930.435510155" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.520389 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" event={"ID":"4ecff5d1-1a76-4282-a11e-ee74b69e7450","Type":"ContainerStarted","Data":"0d565087570d3cbc0bd459160d172dc5a9747b3e5e9a7adc68430893e10665e7"} Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.520430 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.597404 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" podStartSLOduration=3.84198393 podStartE2EDuration="48.597382117s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.872674798 +0000 UTC m=+884.789621425" lastFinishedPulling="2025-12-10 15:35:55.628072985 +0000 UTC m=+929.545019612" observedRunningTime="2025-12-10 15:35:56.592599636 +0000 UTC m=+930.509546253" watchObservedRunningTime="2025-12-10 15:35:56.597382117 +0000 UTC m=+930.514328744" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.617324 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb" podStartSLOduration=4.106125815 podStartE2EDuration="48.617304469s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:11.133013378 +0000 UTC m=+885.049959995" lastFinishedPulling="2025-12-10 15:35:55.644192022 +0000 UTC m=+929.561138649" observedRunningTime="2025-12-10 15:35:56.549832907 +0000 UTC m=+930.466779544" watchObservedRunningTime="2025-12-10 15:35:56.617304469 +0000 UTC m=+930.534251096" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.649290 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" podStartSLOduration=3.13891383 podStartE2EDuration="48.649271916s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.133831936 +0000 UTC m=+884.050778563" lastFinishedPulling="2025-12-10 15:35:55.644190032 +0000 
UTC m=+929.561136649" observedRunningTime="2025-12-10 15:35:56.619452283 +0000 UTC m=+930.536398910" watchObservedRunningTime="2025-12-10 15:35:56.649271916 +0000 UTC m=+930.566218543" Dec 10 15:35:56 crc kubenswrapper[4669]: I1210 15:35:56.656463 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" podStartSLOduration=40.817437459 podStartE2EDuration="48.656447967s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:44.894850701 +0000 UTC m=+918.811797328" lastFinishedPulling="2025-12-10 15:35:52.733861209 +0000 UTC m=+926.650807836" observedRunningTime="2025-12-10 15:35:56.647740417 +0000 UTC m=+930.564687044" watchObservedRunningTime="2025-12-10 15:35:56.656447967 +0000 UTC m=+930.573394594" Dec 10 15:35:57 crc kubenswrapper[4669]: I1210 15:35:57.526035 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfxdv" event={"ID":"8ca3c1f5-20d1-4402-8fe8-08695e534f2e","Type":"ContainerStarted","Data":"cc1a486ecade6404a23d485ca33c8f7d288f3229a283c6eca8b8e6699bee4262"} Dec 10 15:35:57 crc kubenswrapper[4669]: I1210 15:35:57.529671 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" event={"ID":"460630fb-9db1-487a-af29-d92b820e0a1b","Type":"ContainerStarted","Data":"4f7830d904bf444405b57885db3e42bc446681f90f8ed229893ca01e0b56f2a4"} Dec 10 15:35:57 crc kubenswrapper[4669]: I1210 15:35:57.530200 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" Dec 10 15:35:57 crc kubenswrapper[4669]: I1210 15:35:57.532173 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" event={"ID":"edd21671-d820-4fb7-835e-97fd0ade3909","Type":"ContainerStarted","Data":"fde6e934b0ebc76b077a63d2e60260364ae8984c704104ae395d4ff3f2bd9133"} Dec 10 15:35:57 crc kubenswrapper[4669]: I1210 15:35:57.532714 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" Dec 10 15:35:57 crc kubenswrapper[4669]: I1210 15:35:57.534332 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" event={"ID":"5f83dee3-b4c5-4c8a-ba44-78d74195e59c","Type":"ContainerStarted","Data":"f7c307a0ac803b0795f38f3f1b64686ef8505d9119422e0d136a6b2298c0cbb0"} Dec 10 15:35:57 crc kubenswrapper[4669]: I1210 15:35:57.576876 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" podStartSLOduration=4.278790065 podStartE2EDuration="49.576862774s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.771230446 +0000 UTC m=+884.688177073" lastFinishedPulling="2025-12-10 15:35:56.069303155 +0000 UTC m=+929.986249782" observedRunningTime="2025-12-10 15:35:57.575231273 +0000 UTC m=+931.492177900" watchObservedRunningTime="2025-12-10 15:35:57.576862774 +0000 UTC m=+931.493809401" Dec 10 15:35:57 crc kubenswrapper[4669]: I1210 15:35:57.594417 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" podStartSLOduration=4.384736995 
podStartE2EDuration="49.594400106s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:10.854075634 +0000 UTC m=+884.771022261" lastFinishedPulling="2025-12-10 15:35:56.063738745 +0000 UTC m=+929.980685372" observedRunningTime="2025-12-10 15:35:57.594059357 +0000 UTC m=+931.511005984" watchObservedRunningTime="2025-12-10 15:35:57.594400106 +0000 UTC m=+931.511346733" Dec 10 15:35:57 crc kubenswrapper[4669]: I1210 15:35:57.617488 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" podStartSLOduration=3.172187059 podStartE2EDuration="49.617471088s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:09.926538778 +0000 UTC m=+883.843485395" lastFinishedPulling="2025-12-10 15:35:56.371822797 +0000 UTC m=+930.288769424" observedRunningTime="2025-12-10 15:35:57.613930139 +0000 UTC m=+931.530876766" watchObservedRunningTime="2025-12-10 15:35:57.617471088 +0000 UTC m=+931.534417715" Dec 10 15:35:58 crc kubenswrapper[4669]: I1210 15:35:58.541380 4669 generic.go:334] "Generic (PLEG): container finished" podID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerID="cc1a486ecade6404a23d485ca33c8f7d288f3229a283c6eca8b8e6699bee4262" exitCode=0 Dec 10 15:35:58 crc kubenswrapper[4669]: I1210 15:35:58.542498 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfxdv" event={"ID":"8ca3c1f5-20d1-4402-8fe8-08695e534f2e","Type":"ContainerDied","Data":"cc1a486ecade6404a23d485ca33c8f7d288f3229a283c6eca8b8e6699bee4262"} Dec 10 15:35:58 crc kubenswrapper[4669]: I1210 15:35:58.543337 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" Dec 10 15:35:59 crc kubenswrapper[4669]: I1210 15:35:59.552590 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfxdv" event={"ID":"8ca3c1f5-20d1-4402-8fe8-08695e534f2e","Type":"ContainerStarted","Data":"2864b477b23219b46bab5dd378cdc7319518bd3108c9cd827a850a6bd6c97155"} Dec 10 15:36:00 crc kubenswrapper[4669]: I1210 15:36:00.460933 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-78d48bff9d-qq45r" Dec 10 15:36:00 crc kubenswrapper[4669]: I1210 15:36:00.486342 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-lfxdv" podStartSLOduration=7.895257469 podStartE2EDuration="13.486323065s" podCreationTimestamp="2025-12-10 15:35:47 +0000 UTC" firstStartedPulling="2025-12-10 15:35:53.536257829 +0000 UTC m=+927.453204446" lastFinishedPulling="2025-12-10 15:35:59.127323415 +0000 UTC m=+933.044270042" observedRunningTime="2025-12-10 15:35:59.573309374 +0000 UTC m=+933.490256001" watchObservedRunningTime="2025-12-10 15:36:00.486323065 +0000 UTC m=+934.403269692" Dec 10 15:36:00 crc kubenswrapper[4669]: I1210 15:36:00.880108 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-84b575879f99k4v" Dec 10 15:36:01 crc kubenswrapper[4669]: I1210 15:36:01.366872 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-565cd4c864-2bxld" Dec 10 15:36:05 crc kubenswrapper[4669]: I1210 15:36:05.601857 4669 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" event={"ID":"d27eeff8-d1a3-4d08-a474-076b14194921","Type":"ContainerStarted","Data":"e02474d7ee0c1894c615b3037deb1dc88c924d60b4e1a4d4f39020eb97eb4ec7"} Dec 10 15:36:05 crc kubenswrapper[4669]: I1210 15:36:05.603037 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" Dec 10 15:36:05 crc kubenswrapper[4669]: I1210 15:36:05.628706 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" podStartSLOduration=4.053182491 podStartE2EDuration="57.628689021s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:11.220830465 +0000 UTC m=+885.137777092" lastFinishedPulling="2025-12-10 15:36:04.796336995 +0000 UTC m=+938.713283622" observedRunningTime="2025-12-10 15:36:05.62230712 +0000 UTC m=+939.539253747" watchObservedRunningTime="2025-12-10 15:36:05.628689021 +0000 UTC m=+939.545635648" Dec 10 15:36:07 crc kubenswrapper[4669]: I1210 15:36:07.739732 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:36:07 crc kubenswrapper[4669]: I1210 15:36:07.740008 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:36:07 crc kubenswrapper[4669]: I1210 15:36:07.781671 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.366690 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-lhblg" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.378054 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-6c677c69b-d6c6m" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.453748 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-5697bb5779-vzhnx" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.501053 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-fkxpl" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.502637 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hczp6"] Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.504004 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.529903 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hczp6"] Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.546742 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-catalog-content\") pod \"redhat-operators-hczp6\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.546880 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-utilities\") pod \"redhat-operators-hczp6\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.546914 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4nbz\" (UniqueName: \"kubernetes.io/projected/ed8932d6-297b-44e1-aadd-476321cdc291-kube-api-access-d4nbz\") pod \"redhat-operators-hczp6\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.647997 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-utilities\") pod \"redhat-operators-hczp6\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.648060 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4nbz\" (UniqueName: \"kubernetes.io/projected/ed8932d6-297b-44e1-aadd-476321cdc291-kube-api-access-d4nbz\") pod \"redhat-operators-hczp6\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.648144 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-catalog-content\") pod \"redhat-operators-hczp6\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.648664 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-catalog-content\") pod \"redhat-operators-hczp6\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.648931 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-utilities\") pod \"redhat-operators-hczp6\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.663575 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.670312 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4nbz\" (UniqueName: \"kubernetes.io/projected/ed8932d6-297b-44e1-aadd-476321cdc291-kube-api-access-d4nbz\") pod \"redhat-operators-hczp6\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:08 crc kubenswrapper[4669]: I1210 15:36:08.836265 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.312290 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hczp6"] Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.466545 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-t7wdn" Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.631142 4669 generic.go:334] "Generic (PLEG): container finished" podID="ed8932d6-297b-44e1-aadd-476321cdc291" containerID="78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c" exitCode=0 Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.631194 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hczp6" event={"ID":"ed8932d6-297b-44e1-aadd-476321cdc291","Type":"ContainerDied","Data":"78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c"} Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.631285 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hczp6" event={"ID":"ed8932d6-297b-44e1-aadd-476321cdc291","Type":"ContainerStarted","Data":"e509de7962c907e66652ee11d6cbc051c1c77241ef40e4b96640a4b880f123a2"} Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.662770 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-79c8c4686c-866l8" Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.699979 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-6ln9s" Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.719773 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-8cxm8" Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.743235 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-2x88f" Dec 10 15:36:09 crc kubenswrapper[4669]: I1210 15:36:09.928801 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-cgdhp" Dec 10 15:36:10 crc kubenswrapper[4669]: I1210 15:36:10.002167 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-dsxdb" Dec 10 15:36:10 crc kubenswrapper[4669]: I1210 15:36:10.646183 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hczp6" event={"ID":"ed8932d6-297b-44e1-aadd-476321cdc291","Type":"ContainerStarted","Data":"fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c"} Dec 10 15:36:10 crc 
kubenswrapper[4669]: I1210 15:36:10.648413 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" event={"ID":"b169689d-8a97-407f-81f7-56497bc77f0b","Type":"ContainerStarted","Data":"0cbbdb2f59c239b436ddd33b5f624a739733916fe8bc51d7e2165f3cbd0a654d"} Dec 10 15:36:10 crc kubenswrapper[4669]: I1210 15:36:10.648624 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" Dec 10 15:36:10 crc kubenswrapper[4669]: I1210 15:36:10.698148 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" podStartSLOduration=3.640571661 podStartE2EDuration="1m2.698127708s" podCreationTimestamp="2025-12-10 15:35:08 +0000 UTC" firstStartedPulling="2025-12-10 15:35:11.120107332 +0000 UTC m=+885.037053959" lastFinishedPulling="2025-12-10 15:36:10.177663379 +0000 UTC m=+944.094610006" observedRunningTime="2025-12-10 15:36:10.696072786 +0000 UTC m=+944.613019423" watchObservedRunningTime="2025-12-10 15:36:10.698127708 +0000 UTC m=+944.615074335" Dec 10 15:36:11 crc kubenswrapper[4669]: I1210 15:36:11.021624 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lfxdv"] Dec 10 15:36:11 crc kubenswrapper[4669]: I1210 15:36:11.022094 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-lfxdv" podUID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerName="registry-server" containerID="cri-o://2864b477b23219b46bab5dd378cdc7319518bd3108c9cd827a850a6bd6c97155" gracePeriod=2 Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.434645 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mpnn7"] Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.437820 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.456715 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mpnn7"] Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.520121 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6clh\" (UniqueName: \"kubernetes.io/projected/1deedbe7-fb09-46fa-9797-b5a93fe8e523-kube-api-access-j6clh\") pod \"redhat-marketplace-mpnn7\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.520181 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-utilities\") pod \"redhat-marketplace-mpnn7\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.520252 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-catalog-content\") pod \"redhat-marketplace-mpnn7\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.621496 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6clh\" (UniqueName: \"kubernetes.io/projected/1deedbe7-fb09-46fa-9797-b5a93fe8e523-kube-api-access-j6clh\") pod \"redhat-marketplace-mpnn7\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.621553 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-utilities\") pod \"redhat-marketplace-mpnn7\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.621588 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-catalog-content\") pod \"redhat-marketplace-mpnn7\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.622597 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-utilities\") pod \"redhat-marketplace-mpnn7\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.642181 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6clh\" (UniqueName: \"kubernetes.io/projected/1deedbe7-fb09-46fa-9797-b5a93fe8e523-kube-api-access-j6clh\") pod \"redhat-marketplace-mpnn7\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.678969 4669 generic.go:334] "Generic (PLEG): 
container finished" podID="ed8932d6-297b-44e1-aadd-476321cdc291" containerID="fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c" exitCode=0 Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.679073 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hczp6" event={"ID":"ed8932d6-297b-44e1-aadd-476321cdc291","Type":"ContainerDied","Data":"fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c"} Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.684271 4669 generic.go:334] "Generic (PLEG): container finished" podID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerID="2864b477b23219b46bab5dd378cdc7319518bd3108c9cd827a850a6bd6c97155" exitCode=0 Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.684310 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfxdv" event={"ID":"8ca3c1f5-20d1-4402-8fe8-08695e534f2e","Type":"ContainerDied","Data":"2864b477b23219b46bab5dd378cdc7319518bd3108c9cd827a850a6bd6c97155"} Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.724324 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-catalog-content\") pod \"redhat-marketplace-mpnn7\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:13 crc kubenswrapper[4669]: I1210 15:36:13.769823 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.304929 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.315965 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mpnn7"] Dec 10 15:36:14 crc kubenswrapper[4669]: W1210 15:36:14.321659 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1deedbe7_fb09_46fa_9797_b5a93fe8e523.slice/crio-191a3c9a5b82b32db8680dc8e2667508fb28002bd15281ede1086b637653c334 WatchSource:0}: Error finding container 191a3c9a5b82b32db8680dc8e2667508fb28002bd15281ede1086b637653c334: Status 404 returned error can't find the container with id 191a3c9a5b82b32db8680dc8e2667508fb28002bd15281ede1086b637653c334 Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.328875 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-njb9d\" (UniqueName: \"kubernetes.io/projected/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-kube-api-access-njb9d\") pod \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.328965 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-utilities\") pod \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.329095 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-catalog-content\") pod 
\"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\" (UID: \"8ca3c1f5-20d1-4402-8fe8-08695e534f2e\") " Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.329742 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-utilities" (OuterVolumeSpecName: "utilities") pod "8ca3c1f5-20d1-4402-8fe8-08695e534f2e" (UID: "8ca3c1f5-20d1-4402-8fe8-08695e534f2e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.344918 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-kube-api-access-njb9d" (OuterVolumeSpecName: "kube-api-access-njb9d") pod "8ca3c1f5-20d1-4402-8fe8-08695e534f2e" (UID: "8ca3c1f5-20d1-4402-8fe8-08695e534f2e"). InnerVolumeSpecName "kube-api-access-njb9d". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.387762 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ca3c1f5-20d1-4402-8fe8-08695e534f2e" (UID: "8ca3c1f5-20d1-4402-8fe8-08695e534f2e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.430369 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.430415 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-njb9d\" (UniqueName: \"kubernetes.io/projected/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-kube-api-access-njb9d\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.430427 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ca3c1f5-20d1-4402-8fe8-08695e534f2e-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.694451 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-lfxdv" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.694663 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-lfxdv" event={"ID":"8ca3c1f5-20d1-4402-8fe8-08695e534f2e","Type":"ContainerDied","Data":"efd7bfeb5610bd9b7ed7cb596e4451a772f3c7cff574df664cad92985c1945b9"} Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.694718 4669 scope.go:117] "RemoveContainer" containerID="2864b477b23219b46bab5dd378cdc7319518bd3108c9cd827a850a6bd6c97155" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.696442 4669 generic.go:334] "Generic (PLEG): container finished" podID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerID="1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5" exitCode=0 Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.696620 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mpnn7" event={"ID":"1deedbe7-fb09-46fa-9797-b5a93fe8e523","Type":"ContainerDied","Data":"1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5"} Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.696700 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mpnn7" event={"ID":"1deedbe7-fb09-46fa-9797-b5a93fe8e523","Type":"ContainerStarted","Data":"191a3c9a5b82b32db8680dc8e2667508fb28002bd15281ede1086b637653c334"} Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.702797 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hczp6" event={"ID":"ed8932d6-297b-44e1-aadd-476321cdc291","Type":"ContainerStarted","Data":"840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b"} Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.711547 4669 scope.go:117] "RemoveContainer" containerID="cc1a486ecade6404a23d485ca33c8f7d288f3229a283c6eca8b8e6699bee4262" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.731354 4669 scope.go:117] "RemoveContainer" containerID="886f62d20b02bc39c8ff08953a107dd8cc2557ac340f7f3cbe2ab2e975ea268b" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.741127 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hczp6" podStartSLOduration=2.261034673 podStartE2EDuration="6.741111212s" podCreationTimestamp="2025-12-10 15:36:08 +0000 UTC" firstStartedPulling="2025-12-10 15:36:09.632763364 +0000 UTC m=+943.549709991" lastFinishedPulling="2025-12-10 15:36:14.112839903 +0000 UTC m=+948.029786530" observedRunningTime="2025-12-10 15:36:14.73549348 +0000 UTC m=+948.652440117" watchObservedRunningTime="2025-12-10 15:36:14.741111212 +0000 UTC m=+948.658057829" Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.761709 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-lfxdv"] Dec 10 15:36:14 crc kubenswrapper[4669]: I1210 15:36:14.769286 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-lfxdv"] Dec 10 15:36:15 crc kubenswrapper[4669]: I1210 15:36:15.709837 4669 generic.go:334] "Generic (PLEG): container finished" podID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerID="035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb" exitCode=0 Dec 10 15:36:15 crc kubenswrapper[4669]: I1210 15:36:15.709906 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-mpnn7" event={"ID":"1deedbe7-fb09-46fa-9797-b5a93fe8e523","Type":"ContainerDied","Data":"035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb"} Dec 10 15:36:16 crc kubenswrapper[4669]: I1210 15:36:16.406256 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" path="/var/lib/kubelet/pods/8ca3c1f5-20d1-4402-8fe8-08695e534f2e/volumes" Dec 10 15:36:16 crc kubenswrapper[4669]: I1210 15:36:16.724441 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mpnn7" event={"ID":"1deedbe7-fb09-46fa-9797-b5a93fe8e523","Type":"ContainerStarted","Data":"6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15"} Dec 10 15:36:16 crc kubenswrapper[4669]: I1210 15:36:16.756714 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mpnn7" podStartSLOduration=2.123225882 podStartE2EDuration="3.756692194s" podCreationTimestamp="2025-12-10 15:36:13 +0000 UTC" firstStartedPulling="2025-12-10 15:36:14.699670467 +0000 UTC m=+948.616617084" lastFinishedPulling="2025-12-10 15:36:16.333136719 +0000 UTC m=+950.250083396" observedRunningTime="2025-12-10 15:36:16.753646166 +0000 UTC m=+950.670592813" watchObservedRunningTime="2025-12-10 15:36:16.756692194 +0000 UTC m=+950.673638841" Dec 10 15:36:18 crc kubenswrapper[4669]: I1210 15:36:18.836978 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:18 crc kubenswrapper[4669]: I1210 15:36:18.837036 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:19 crc kubenswrapper[4669]: I1210 15:36:19.860513 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-9d58d64bc-n8qx2" Dec 10 15:36:19 crc kubenswrapper[4669]: I1210 15:36:19.897068 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hczp6" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" containerName="registry-server" probeResult="failure" output=< Dec 10 15:36:19 crc kubenswrapper[4669]: timeout: failed to connect service ":50051" within 1s Dec 10 15:36:19 crc kubenswrapper[4669]: > Dec 10 15:36:23 crc kubenswrapper[4669]: I1210 15:36:23.770864 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:23 crc kubenswrapper[4669]: I1210 15:36:23.771502 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:23 crc kubenswrapper[4669]: I1210 15:36:23.825935 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:24 crc kubenswrapper[4669]: I1210 15:36:24.842854 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:24 crc kubenswrapper[4669]: I1210 15:36:24.889197 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mpnn7"] Dec 10 15:36:26 crc kubenswrapper[4669]: I1210 15:36:26.808289 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mpnn7" 
podUID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerName="registry-server" containerID="cri-o://6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15" gracePeriod=2 Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.732904 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.816003 4669 generic.go:334] "Generic (PLEG): container finished" podID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerID="6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15" exitCode=0 Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.816042 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mpnn7" event={"ID":"1deedbe7-fb09-46fa-9797-b5a93fe8e523","Type":"ContainerDied","Data":"6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15"} Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.816078 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mpnn7" event={"ID":"1deedbe7-fb09-46fa-9797-b5a93fe8e523","Type":"ContainerDied","Data":"191a3c9a5b82b32db8680dc8e2667508fb28002bd15281ede1086b637653c334"} Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.816079 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mpnn7" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.816094 4669 scope.go:117] "RemoveContainer" containerID="6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.832313 4669 scope.go:117] "RemoveContainer" containerID="035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.849759 4669 scope.go:117] "RemoveContainer" containerID="1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.869919 4669 scope.go:117] "RemoveContainer" containerID="6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15" Dec 10 15:36:27 crc kubenswrapper[4669]: E1210 15:36:27.870419 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15\": container with ID starting with 6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15 not found: ID does not exist" containerID="6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.870459 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15"} err="failed to get container status \"6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15\": rpc error: code = NotFound desc = could not find container \"6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15\": container with ID starting with 6c54bb3738f02f545496195ba6232e5fa03f2a544f483e49ec578e03f727ea15 not found: ID does not exist" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.870481 4669 scope.go:117] "RemoveContainer" containerID="035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb" Dec 10 15:36:27 crc kubenswrapper[4669]: E1210 15:36:27.870861 4669 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb\": container with ID starting with 035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb not found: ID does not exist" containerID="035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.870889 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb"} err="failed to get container status \"035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb\": rpc error: code = NotFound desc = could not find container \"035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb\": container with ID starting with 035bb766f9041985c0d1592dd285ff7ac53848ca129afb4578add0bc3d5667cb not found: ID does not exist" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.870903 4669 scope.go:117] "RemoveContainer" containerID="1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5" Dec 10 15:36:27 crc kubenswrapper[4669]: E1210 15:36:27.871195 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5\": container with ID starting with 1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5 not found: ID does not exist" containerID="1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.871251 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5"} err="failed to get container status \"1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5\": rpc error: code = NotFound desc = could not find container \"1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5\": container with ID starting with 1285b060ce98b830b16ad15baed122cecb781fb05324929e968d66bef86d5ce5 not found: ID does not exist" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.905180 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6clh\" (UniqueName: \"kubernetes.io/projected/1deedbe7-fb09-46fa-9797-b5a93fe8e523-kube-api-access-j6clh\") pod \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.905252 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-catalog-content\") pod \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.905303 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-utilities\") pod \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\" (UID: \"1deedbe7-fb09-46fa-9797-b5a93fe8e523\") " Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.906154 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-utilities" (OuterVolumeSpecName: "utilities") pod "1deedbe7-fb09-46fa-9797-b5a93fe8e523" (UID: 
"1deedbe7-fb09-46fa-9797-b5a93fe8e523"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.912307 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1deedbe7-fb09-46fa-9797-b5a93fe8e523-kube-api-access-j6clh" (OuterVolumeSpecName: "kube-api-access-j6clh") pod "1deedbe7-fb09-46fa-9797-b5a93fe8e523" (UID: "1deedbe7-fb09-46fa-9797-b5a93fe8e523"). InnerVolumeSpecName "kube-api-access-j6clh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:36:27 crc kubenswrapper[4669]: I1210 15:36:27.927743 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1deedbe7-fb09-46fa-9797-b5a93fe8e523" (UID: "1deedbe7-fb09-46fa-9797-b5a93fe8e523"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.007175 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6clh\" (UniqueName: \"kubernetes.io/projected/1deedbe7-fb09-46fa-9797-b5a93fe8e523-kube-api-access-j6clh\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.007445 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.007529 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1deedbe7-fb09-46fa-9797-b5a93fe8e523-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.159328 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mpnn7"] Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.165769 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mpnn7"] Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.405390 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" path="/var/lib/kubelet/pods/1deedbe7-fb09-46fa-9797-b5a93fe8e523/volumes" Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.745451 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.745945 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.888237 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:28 crc kubenswrapper[4669]: I1210 15:36:28.945282 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:29 crc kubenswrapper[4669]: I1210 15:36:29.971049 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hczp6"] Dec 10 15:36:30 crc kubenswrapper[4669]: I1210 15:36:30.839472 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hczp6" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" containerName="registry-server" containerID="cri-o://840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b" gracePeriod=2 Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.252927 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.354075 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4nbz\" (UniqueName: \"kubernetes.io/projected/ed8932d6-297b-44e1-aadd-476321cdc291-kube-api-access-d4nbz\") pod \"ed8932d6-297b-44e1-aadd-476321cdc291\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.354743 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-catalog-content\") pod \"ed8932d6-297b-44e1-aadd-476321cdc291\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.354774 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-utilities\") pod \"ed8932d6-297b-44e1-aadd-476321cdc291\" (UID: \"ed8932d6-297b-44e1-aadd-476321cdc291\") " Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.355686 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-utilities" (OuterVolumeSpecName: "utilities") pod "ed8932d6-297b-44e1-aadd-476321cdc291" (UID: "ed8932d6-297b-44e1-aadd-476321cdc291"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.361317 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed8932d6-297b-44e1-aadd-476321cdc291-kube-api-access-d4nbz" (OuterVolumeSpecName: "kube-api-access-d4nbz") pod "ed8932d6-297b-44e1-aadd-476321cdc291" (UID: "ed8932d6-297b-44e1-aadd-476321cdc291"). InnerVolumeSpecName "kube-api-access-d4nbz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.456590 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.456666 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4nbz\" (UniqueName: \"kubernetes.io/projected/ed8932d6-297b-44e1-aadd-476321cdc291-kube-api-access-d4nbz\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.494034 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ed8932d6-297b-44e1-aadd-476321cdc291" (UID: "ed8932d6-297b-44e1-aadd-476321cdc291"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.559028 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ed8932d6-297b-44e1-aadd-476321cdc291-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.848134 4669 generic.go:334] "Generic (PLEG): container finished" podID="ed8932d6-297b-44e1-aadd-476321cdc291" containerID="840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b" exitCode=0 Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.848295 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hczp6" event={"ID":"ed8932d6-297b-44e1-aadd-476321cdc291","Type":"ContainerDied","Data":"840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b"} Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.848689 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hczp6" event={"ID":"ed8932d6-297b-44e1-aadd-476321cdc291","Type":"ContainerDied","Data":"e509de7962c907e66652ee11d6cbc051c1c77241ef40e4b96640a4b880f123a2"} Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.848715 4669 scope.go:117] "RemoveContainer" containerID="840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.848402 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hczp6" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.870469 4669 scope.go:117] "RemoveContainer" containerID="fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.897402 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hczp6"] Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.901901 4669 scope.go:117] "RemoveContainer" containerID="78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.902238 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hczp6"] Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.918063 4669 scope.go:117] "RemoveContainer" containerID="840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b" Dec 10 15:36:31 crc kubenswrapper[4669]: E1210 15:36:31.918513 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b\": container with ID starting with 840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b not found: ID does not exist" containerID="840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.918544 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b"} err="failed to get container status \"840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b\": rpc error: code = NotFound desc = could not find container \"840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b\": container with ID starting with 840f14d4c326c28cfb12ff5c33588ed16149a4c3d89edf17eadf5ba04e11e11b not found: ID does not exist" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.918568 4669 scope.go:117] "RemoveContainer" containerID="fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c" Dec 10 15:36:31 crc kubenswrapper[4669]: E1210 15:36:31.919007 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c\": container with ID starting with fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c not found: ID does not exist" containerID="fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.919032 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c"} err="failed to get container status \"fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c\": rpc error: code = NotFound desc = could not find container \"fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c\": container with ID starting with fca2aeaa151f4618b8168ecf72f36912c00d62a0994c5af22627e2e4aff1ca4c not found: ID does not exist" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.919049 4669 scope.go:117] "RemoveContainer" containerID="78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c" Dec 10 15:36:31 crc kubenswrapper[4669]: E1210 15:36:31.919464 4669 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c\": container with ID starting with 78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c not found: ID does not exist" containerID="78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c" Dec 10 15:36:31 crc kubenswrapper[4669]: I1210 15:36:31.919537 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c"} err="failed to get container status \"78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c\": rpc error: code = NotFound desc = could not find container \"78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c\": container with ID starting with 78b2e76ff060e61d9f1aa3cd3d3e4d082713c0bb9cd0de56123c7889245ff15c not found: ID does not exist" Dec 10 15:36:32 crc kubenswrapper[4669]: I1210 15:36:32.405792 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" path="/var/lib/kubelet/pods/ed8932d6-297b-44e1-aadd-476321cdc291/volumes" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.380436 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zjp7c"] Dec 10 15:36:34 crc kubenswrapper[4669]: E1210 15:36:34.381196 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerName="registry-server" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381236 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerName="registry-server" Dec 10 15:36:34 crc kubenswrapper[4669]: E1210 15:36:34.381287 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" containerName="registry-server" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381299 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" containerName="registry-server" Dec 10 15:36:34 crc kubenswrapper[4669]: E1210 15:36:34.381320 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" containerName="extract-content" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381331 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" containerName="extract-content" Dec 10 15:36:34 crc kubenswrapper[4669]: E1210 15:36:34.381358 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerName="extract-utilities" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381371 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerName="extract-utilities" Dec 10 15:36:34 crc kubenswrapper[4669]: E1210 15:36:34.381400 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerName="extract-utilities" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381409 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerName="extract-utilities" Dec 10 15:36:34 crc kubenswrapper[4669]: E1210 15:36:34.381426 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerName="extract-content" Dec 
10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381436 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerName="extract-content" Dec 10 15:36:34 crc kubenswrapper[4669]: E1210 15:36:34.381458 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerName="registry-server" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381467 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerName="registry-server" Dec 10 15:36:34 crc kubenswrapper[4669]: E1210 15:36:34.381479 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" containerName="extract-utilities" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381488 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" containerName="extract-utilities" Dec 10 15:36:34 crc kubenswrapper[4669]: E1210 15:36:34.381503 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerName="extract-content" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381513 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerName="extract-content" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381766 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed8932d6-297b-44e1-aadd-476321cdc291" containerName="registry-server" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381785 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="1deedbe7-fb09-46fa-9797-b5a93fe8e523" containerName="registry-server" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.381805 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ca3c1f5-20d1-4402-8fe8-08695e534f2e" containerName="registry-server" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.383532 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.396516 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zjp7c"] Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.503796 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-utilities\") pod \"community-operators-zjp7c\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.503841 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-catalog-content\") pod \"community-operators-zjp7c\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.503908 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cpjr6\" (UniqueName: \"kubernetes.io/projected/8c517075-3faf-4a64-825f-eef1eeb67174-kube-api-access-cpjr6\") pod \"community-operators-zjp7c\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.605205 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-utilities\") pod \"community-operators-zjp7c\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.605280 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-catalog-content\") pod \"community-operators-zjp7c\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.605368 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cpjr6\" (UniqueName: \"kubernetes.io/projected/8c517075-3faf-4a64-825f-eef1eeb67174-kube-api-access-cpjr6\") pod \"community-operators-zjp7c\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.605826 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-utilities\") pod \"community-operators-zjp7c\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.605894 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-catalog-content\") pod \"community-operators-zjp7c\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.629352 4669 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cpjr6\" (UniqueName: \"kubernetes.io/projected/8c517075-3faf-4a64-825f-eef1eeb67174-kube-api-access-cpjr6\") pod \"community-operators-zjp7c\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:34 crc kubenswrapper[4669]: I1210 15:36:34.745163 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:35 crc kubenswrapper[4669]: I1210 15:36:35.219750 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zjp7c"] Dec 10 15:36:35 crc kubenswrapper[4669]: W1210 15:36:35.224467 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c517075_3faf_4a64_825f_eef1eeb67174.slice/crio-da36b6895623c257d2c047b6dd4a9db5affc74afc5bf5a5ab4ae73a0c3549c0c WatchSource:0}: Error finding container da36b6895623c257d2c047b6dd4a9db5affc74afc5bf5a5ab4ae73a0c3549c0c: Status 404 returned error can't find the container with id da36b6895623c257d2c047b6dd4a9db5affc74afc5bf5a5ab4ae73a0c3549c0c Dec 10 15:36:35 crc kubenswrapper[4669]: I1210 15:36:35.893738 4669 generic.go:334] "Generic (PLEG): container finished" podID="8c517075-3faf-4a64-825f-eef1eeb67174" containerID="5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9" exitCode=0 Dec 10 15:36:35 crc kubenswrapper[4669]: I1210 15:36:35.893814 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjp7c" event={"ID":"8c517075-3faf-4a64-825f-eef1eeb67174","Type":"ContainerDied","Data":"5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9"} Dec 10 15:36:35 crc kubenswrapper[4669]: I1210 15:36:35.894177 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjp7c" event={"ID":"8c517075-3faf-4a64-825f-eef1eeb67174","Type":"ContainerStarted","Data":"da36b6895623c257d2c047b6dd4a9db5affc74afc5bf5a5ab4ae73a0c3549c0c"} Dec 10 15:36:36 crc kubenswrapper[4669]: I1210 15:36:36.902510 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjp7c" event={"ID":"8c517075-3faf-4a64-825f-eef1eeb67174","Type":"ContainerStarted","Data":"e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8"} Dec 10 15:36:37 crc kubenswrapper[4669]: I1210 15:36:37.913949 4669 generic.go:334] "Generic (PLEG): container finished" podID="8c517075-3faf-4a64-825f-eef1eeb67174" containerID="e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8" exitCode=0 Dec 10 15:36:37 crc kubenswrapper[4669]: I1210 15:36:37.914055 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjp7c" event={"ID":"8c517075-3faf-4a64-825f-eef1eeb67174","Type":"ContainerDied","Data":"e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8"} Dec 10 15:36:39 crc kubenswrapper[4669]: I1210 15:36:39.932152 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjp7c" event={"ID":"8c517075-3faf-4a64-825f-eef1eeb67174","Type":"ContainerStarted","Data":"8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962"} Dec 10 15:36:39 crc kubenswrapper[4669]: I1210 15:36:39.952377 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zjp7c" 
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.656557 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zz5x5"]
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.664563 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.674125 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.674340 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-jxdr8"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.674507 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.674128 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.686810 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zz5x5"]
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.731276 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4bqxl"]
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.734277 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.737388 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.739910 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-config\") pod \"dnsmasq-dns-675f4bcbfc-zz5x5\" (UID: \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.740003 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grkxr\" (UniqueName: \"kubernetes.io/projected/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-kube-api-access-grkxr\") pod \"dnsmasq-dns-675f4bcbfc-zz5x5\" (UID: \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.742754 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4bqxl"]
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.842207 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-config\") pod \"dnsmasq-dns-675f4bcbfc-zz5x5\" (UID: \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.842256 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-config\") pod \"dnsmasq-dns-78dd6ddcc-4bqxl\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.842280 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77hgh\" (UniqueName: \"kubernetes.io/projected/721f9831-e2ee-4f78-a943-a19a0d6cdeab-kube-api-access-77hgh\") pod \"dnsmasq-dns-78dd6ddcc-4bqxl\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.842311 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grkxr\" (UniqueName: \"kubernetes.io/projected/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-kube-api-access-grkxr\") pod \"dnsmasq-dns-675f4bcbfc-zz5x5\" (UID: \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.842395 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-4bqxl\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.843028 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-config\") pod \"dnsmasq-dns-675f4bcbfc-zz5x5\" (UID: \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.875482 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grkxr\" (UniqueName: \"kubernetes.io/projected/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-kube-api-access-grkxr\") pod \"dnsmasq-dns-675f4bcbfc-zz5x5\" (UID: \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\") " pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.943592 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-config\") pod \"dnsmasq-dns-78dd6ddcc-4bqxl\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.943656 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77hgh\" (UniqueName: \"kubernetes.io/projected/721f9831-e2ee-4f78-a943-a19a0d6cdeab-kube-api-access-77hgh\") pod \"dnsmasq-dns-78dd6ddcc-4bqxl\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.943722 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-4bqxl\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.944687 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-4bqxl\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.944716 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-config\") pod \"dnsmasq-dns-78dd6ddcc-4bqxl\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.965515 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77hgh\" (UniqueName: \"kubernetes.io/projected/721f9831-e2ee-4f78-a943-a19a0d6cdeab-kube-api-access-77hgh\") pod \"dnsmasq-dns-78dd6ddcc-4bqxl\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:43 crc kubenswrapper[4669]: I1210 15:36:43.994551 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5"
Dec 10 15:36:44 crc kubenswrapper[4669]: I1210 15:36:44.050379 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl"
Dec 10 15:36:44 crc kubenswrapper[4669]: I1210 15:36:44.437790 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zz5x5"]
Dec 10 15:36:44 crc kubenswrapper[4669]: W1210 15:36:44.506165 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod721f9831_e2ee_4f78_a943_a19a0d6cdeab.slice/crio-32becb2fd3b6b49fb4678b12285fa7823ef6bd354b336653f7621b17b3865ed7 WatchSource:0}: Error finding container 32becb2fd3b6b49fb4678b12285fa7823ef6bd354b336653f7621b17b3865ed7: Status 404 returned error can't find the container with id 32becb2fd3b6b49fb4678b12285fa7823ef6bd354b336653f7621b17b3865ed7
Dec 10 15:36:44 crc kubenswrapper[4669]: I1210 15:36:44.506663 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4bqxl"]
Dec 10 15:36:44 crc kubenswrapper[4669]: I1210 15:36:44.746306 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zjp7c"
Dec 10 15:36:44 crc kubenswrapper[4669]: I1210 15:36:44.747730 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zjp7c"
Dec 10 15:36:44 crc kubenswrapper[4669]: I1210 15:36:44.788425 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zjp7c"
Dec 10 15:36:44 crc kubenswrapper[4669]: I1210 15:36:44.963423 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5" event={"ID":"8ebba8c4-fe89-43dd-a8fd-2be0298a5622","Type":"ContainerStarted","Data":"dcc58f84d135886f656336316c1611ed84002014e80d2bedcf8e3145fdd356eb"}
Dec 10 15:36:44 crc kubenswrapper[4669]: I1210 15:36:44.964802 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl" event={"ID":"721f9831-e2ee-4f78-a943-a19a0d6cdeab","Type":"ContainerStarted","Data":"32becb2fd3b6b49fb4678b12285fa7823ef6bd354b336653f7621b17b3865ed7"}
Dec 10 15:36:45 crc kubenswrapper[4669]: I1210 15:36:45.013526 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zjp7c"
Dec 10 15:36:45 crc kubenswrapper[4669]: I1210 15:36:45.066275 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zjp7c"]
Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.712266 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zz5x5"]
Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.736564 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-mnzm4"]
Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.738049 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4"
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.755874 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-mnzm4"] Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.805954 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gd8w\" (UniqueName: \"kubernetes.io/projected/76773d12-e582-4a80-bad2-f1d8924c7ce0-kube-api-access-5gd8w\") pod \"dnsmasq-dns-666b6646f7-mnzm4\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.806022 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-dns-svc\") pod \"dnsmasq-dns-666b6646f7-mnzm4\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.806046 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-config\") pod \"dnsmasq-dns-666b6646f7-mnzm4\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.907754 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gd8w\" (UniqueName: \"kubernetes.io/projected/76773d12-e582-4a80-bad2-f1d8924c7ce0-kube-api-access-5gd8w\") pod \"dnsmasq-dns-666b6646f7-mnzm4\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.907839 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-dns-svc\") pod \"dnsmasq-dns-666b6646f7-mnzm4\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.907864 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-config\") pod \"dnsmasq-dns-666b6646f7-mnzm4\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.908782 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-config\") pod \"dnsmasq-dns-666b6646f7-mnzm4\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.909150 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-dns-svc\") pod \"dnsmasq-dns-666b6646f7-mnzm4\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.950184 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gd8w\" (UniqueName: 
\"kubernetes.io/projected/76773d12-e582-4a80-bad2-f1d8924c7ce0-kube-api-access-5gd8w\") pod \"dnsmasq-dns-666b6646f7-mnzm4\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:46 crc kubenswrapper[4669]: I1210 15:36:46.991180 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zjp7c" podUID="8c517075-3faf-4a64-825f-eef1eeb67174" containerName="registry-server" containerID="cri-o://8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962" gracePeriod=2 Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.073572 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.121952 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4bqxl"] Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.153172 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2gn25"] Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.154861 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.172438 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2gn25"] Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.233198 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smg2k\" (UniqueName: \"kubernetes.io/projected/252bdb2d-5492-4583-872d-50dd50e34984-kube-api-access-smg2k\") pod \"dnsmasq-dns-57d769cc4f-2gn25\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.233259 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-2gn25\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.233350 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-config\") pod \"dnsmasq-dns-57d769cc4f-2gn25\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.339657 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smg2k\" (UniqueName: \"kubernetes.io/projected/252bdb2d-5492-4583-872d-50dd50e34984-kube-api-access-smg2k\") pod \"dnsmasq-dns-57d769cc4f-2gn25\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.339704 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-2gn25\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.339762 4669 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-config\") pod \"dnsmasq-dns-57d769cc4f-2gn25\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.340696 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-config\") pod \"dnsmasq-dns-57d769cc4f-2gn25\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.340724 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-2gn25\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.367859 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smg2k\" (UniqueName: \"kubernetes.io/projected/252bdb2d-5492-4583-872d-50dd50e34984-kube-api-access-smg2k\") pod \"dnsmasq-dns-57d769cc4f-2gn25\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.505020 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.656340 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.745261 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cpjr6\" (UniqueName: \"kubernetes.io/projected/8c517075-3faf-4a64-825f-eef1eeb67174-kube-api-access-cpjr6\") pod \"8c517075-3faf-4a64-825f-eef1eeb67174\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.745317 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-utilities\") pod \"8c517075-3faf-4a64-825f-eef1eeb67174\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.745411 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-catalog-content\") pod \"8c517075-3faf-4a64-825f-eef1eeb67174\" (UID: \"8c517075-3faf-4a64-825f-eef1eeb67174\") " Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.748656 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-utilities" (OuterVolumeSpecName: "utilities") pod "8c517075-3faf-4a64-825f-eef1eeb67174" (UID: "8c517075-3faf-4a64-825f-eef1eeb67174"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.751450 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c517075-3faf-4a64-825f-eef1eeb67174-kube-api-access-cpjr6" (OuterVolumeSpecName: "kube-api-access-cpjr6") pod "8c517075-3faf-4a64-825f-eef1eeb67174" (UID: "8c517075-3faf-4a64-825f-eef1eeb67174"). InnerVolumeSpecName "kube-api-access-cpjr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.819855 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-mnzm4"] Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.831074 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8c517075-3faf-4a64-825f-eef1eeb67174" (UID: "8c517075-3faf-4a64-825f-eef1eeb67174"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.847584 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.847615 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8c517075-3faf-4a64-825f-eef1eeb67174-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.847626 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cpjr6\" (UniqueName: \"kubernetes.io/projected/8c517075-3faf-4a64-825f-eef1eeb67174-kube-api-access-cpjr6\") on node \"crc\" DevicePath \"\"" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.898537 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 15:36:47 crc kubenswrapper[4669]: E1210 15:36:47.898812 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c517075-3faf-4a64-825f-eef1eeb67174" containerName="extract-utilities" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.898833 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c517075-3faf-4a64-825f-eef1eeb67174" containerName="extract-utilities" Dec 10 15:36:47 crc kubenswrapper[4669]: E1210 15:36:47.898855 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c517075-3faf-4a64-825f-eef1eeb67174" containerName="registry-server" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.898863 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c517075-3faf-4a64-825f-eef1eeb67174" containerName="registry-server" Dec 10 15:36:47 crc kubenswrapper[4669]: E1210 15:36:47.898885 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c517075-3faf-4a64-825f-eef1eeb67174" containerName="extract-content" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.898893 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c517075-3faf-4a64-825f-eef1eeb67174" containerName="extract-content" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.899134 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c517075-3faf-4a64-825f-eef1eeb67174" containerName="registry-server" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.900089 4669 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.903461 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.906683 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.906940 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.906964 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wcsm7" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.907154 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.907378 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.907541 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 10 15:36:47 crc kubenswrapper[4669]: I1210 15:36:47.931199 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.010548 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" event={"ID":"76773d12-e582-4a80-bad2-f1d8924c7ce0","Type":"ContainerStarted","Data":"b0b817f1ae49ffd038372786f5494b7a0711c6bd831be8d0fda8c0932399908e"} Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.013799 4669 generic.go:334] "Generic (PLEG): container finished" podID="8c517075-3faf-4a64-825f-eef1eeb67174" containerID="8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962" exitCode=0 Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.013830 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjp7c" event={"ID":"8c517075-3faf-4a64-825f-eef1eeb67174","Type":"ContainerDied","Data":"8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962"} Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.013982 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zjp7c" event={"ID":"8c517075-3faf-4a64-825f-eef1eeb67174","Type":"ContainerDied","Data":"da36b6895623c257d2c047b6dd4a9db5affc74afc5bf5a5ab4ae73a0c3549c0c"} Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.013959 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zjp7c" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.014096 4669 scope.go:117] "RemoveContainer" containerID="8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.052707 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.052750 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.052825 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.053276 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b359c954-51b4-401c-a783-f0220d650a4b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.053310 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.053328 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b359c954-51b4-401c-a783-f0220d650a4b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.053347 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-config-data\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.053361 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fvq6\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-kube-api-access-6fvq6\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.053419 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.053440 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.053490 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.053880 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2gn25"] Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.059419 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zjp7c"] Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.064997 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zjp7c"] Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.071621 4669 scope.go:117] "RemoveContainer" containerID="e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.138056 4669 scope.go:117] "RemoveContainer" containerID="5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154385 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154434 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b359c954-51b4-401c-a783-f0220d650a4b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154460 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154484 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b359c954-51b4-401c-a783-f0220d650a4b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154500 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-config-data\") pod 
\"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154514 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fvq6\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-kube-api-access-6fvq6\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154551 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154569 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154586 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154604 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154620 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.154974 4669 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.156353 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.156418 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.156992 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.157282 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-config-data\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.158021 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-server-conf\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.168440 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b359c954-51b4-401c-a783-f0220d650a4b-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.169042 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b359c954-51b4-401c-a783-f0220d650a4b-pod-info\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.170711 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.173281 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.176061 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fvq6\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-kube-api-access-6fvq6\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.178674 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") " pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.185401 4669 scope.go:117] "RemoveContainer" containerID="8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962" Dec 10 15:36:48 crc kubenswrapper[4669]: E1210 15:36:48.185923 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962\": container with ID starting with 
8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962 not found: ID does not exist" containerID="8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.185961 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962"} err="failed to get container status \"8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962\": rpc error: code = NotFound desc = could not find container \"8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962\": container with ID starting with 8407e4ec3c7b53efe5419bd13ff1af942d7dfb2784b86042735f257b0d4b9962 not found: ID does not exist" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.185993 4669 scope.go:117] "RemoveContainer" containerID="e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8" Dec 10 15:36:48 crc kubenswrapper[4669]: E1210 15:36:48.187319 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8\": container with ID starting with e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8 not found: ID does not exist" containerID="e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.187354 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8"} err="failed to get container status \"e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8\": rpc error: code = NotFound desc = could not find container \"e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8\": container with ID starting with e6ecb658677e5019fc0244f3d3449cd74a9485aad27128f01a3f46aea2685ba8 not found: ID does not exist" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.187375 4669 scope.go:117] "RemoveContainer" containerID="5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9" Dec 10 15:36:48 crc kubenswrapper[4669]: E1210 15:36:48.189612 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9\": container with ID starting with 5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9 not found: ID does not exist" containerID="5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.189644 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9"} err="failed to get container status \"5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9\": rpc error: code = NotFound desc = could not find container \"5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9\": container with ID starting with 5b7bc2c198d1ae5382a28c44cb598a85bb626e497d9c06b0d18ca35b00e751b9 not found: ID does not exist" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.233443 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.286598 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.288015 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.329656 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.330690 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.332585 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.332699 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.332789 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.333153 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.333675 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-6jbw2" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.337352 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358038 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358116 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7714ed30-3730-4a63-8d4d-2b7e097cadbc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358148 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358207 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358282 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" 
(UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358360 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358395 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7714ed30-3730-4a63-8d4d-2b7e097cadbc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358451 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358489 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358522 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qx565\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-kube-api-access-qx565\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.358559 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.407717 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c517075-3faf-4a64-825f-eef1eeb67174" path="/var/lib/kubelet/pods/8c517075-3faf-4a64-825f-eef1eeb67174/volumes" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.460917 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461231 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461254 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7714ed30-3730-4a63-8d4d-2b7e097cadbc-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461280 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461322 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461375 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461417 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461433 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7714ed30-3730-4a63-8d4d-2b7e097cadbc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461458 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461481 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461504 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qx565\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-kube-api-access-qx565\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461775 4669 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.461837 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.462646 4669 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.463656 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.463841 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.464519 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.466440 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.467409 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7714ed30-3730-4a63-8d4d-2b7e097cadbc-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.472534 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.477356 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7714ed30-3730-4a63-8d4d-2b7e097cadbc-pod-info\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.486872 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qx565\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-kube-api-access-qx565\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.529576 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.652598 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:36:48 crc kubenswrapper[4669]: I1210 15:36:48.781156 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 15:36:48 crc kubenswrapper[4669]: W1210 15:36:48.799591 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb359c954_51b4_401c_a783_f0220d650a4b.slice/crio-0057ab444d353c6d2c73fdcfcfcc99b8de2597e058a409108b9a86254ffd5254 WatchSource:0}: Error finding container 0057ab444d353c6d2c73fdcfcfcc99b8de2597e058a409108b9a86254ffd5254: Status 404 returned error can't find the container with id 0057ab444d353c6d2c73fdcfcfcc99b8de2597e058a409108b9a86254ffd5254 Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.055284 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" event={"ID":"252bdb2d-5492-4583-872d-50dd50e34984","Type":"ContainerStarted","Data":"029fff73296eefd642482c2d87b4c69b75ec7d3d7a1993a26fc31d24ed022d52"} Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.093270 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b359c954-51b4-401c-a783-f0220d650a4b","Type":"ContainerStarted","Data":"0057ab444d353c6d2c73fdcfcfcc99b8de2597e058a409108b9a86254ffd5254"} Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.235634 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.838393 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.840472 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.844314 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-k4hl8" Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.844393 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.858055 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.859244 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.861353 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 10 15:36:49 crc kubenswrapper[4669]: I1210 15:36:49.864407 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.021706 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7873e2f3-ba85-4e59-8866-dab32f5604c4-config-data-default\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.021824 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7873e2f3-ba85-4e59-8866-dab32f5604c4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.022338 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7873e2f3-ba85-4e59-8866-dab32f5604c4-kolla-config\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.022413 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7873e2f3-ba85-4e59-8866-dab32f5604c4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.022470 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7873e2f3-ba85-4e59-8866-dab32f5604c4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.022491 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7873e2f3-ba85-4e59-8866-dab32f5604c4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.022516 4669 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxttc\" (UniqueName: \"kubernetes.io/projected/7873e2f3-ba85-4e59-8866-dab32f5604c4-kube-api-access-hxttc\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.022588 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.118838 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7714ed30-3730-4a63-8d4d-2b7e097cadbc","Type":"ContainerStarted","Data":"916bc480292e92185f21c1c0aefa4c60cb12aa00d2e694dfa64161240b1abd88"} Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.124432 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7873e2f3-ba85-4e59-8866-dab32f5604c4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.124568 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7873e2f3-ba85-4e59-8866-dab32f5604c4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.124640 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7873e2f3-ba85-4e59-8866-dab32f5604c4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.124660 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxttc\" (UniqueName: \"kubernetes.io/projected/7873e2f3-ba85-4e59-8866-dab32f5604c4-kube-api-access-hxttc\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.124686 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.124732 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7873e2f3-ba85-4e59-8866-dab32f5604c4-config-data-default\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.124746 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7873e2f3-ba85-4e59-8866-dab32f5604c4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " 
pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.124768 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7873e2f3-ba85-4e59-8866-dab32f5604c4-kolla-config\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.125373 4669 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.125831 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7873e2f3-ba85-4e59-8866-dab32f5604c4-kolla-config\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.126610 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7873e2f3-ba85-4e59-8866-dab32f5604c4-config-data-default\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.126979 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7873e2f3-ba85-4e59-8866-dab32f5604c4-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.127484 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7873e2f3-ba85-4e59-8866-dab32f5604c4-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.150972 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7873e2f3-ba85-4e59-8866-dab32f5604c4-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.168960 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxttc\" (UniqueName: \"kubernetes.io/projected/7873e2f3-ba85-4e59-8866-dab32f5604c4-kube-api-access-hxttc\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.180365 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.182771 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7873e2f3-ba85-4e59-8866-dab32f5604c4-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7873e2f3-ba85-4e59-8866-dab32f5604c4\") " pod="openstack/openstack-galera-0" Dec 10 15:36:50 crc kubenswrapper[4669]: I1210 15:36:50.473146 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.170855 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.273790 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.275809 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.278935 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.279205 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-2mlvn" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.282648 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.300640 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.303141 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 15:36:51 crc kubenswrapper[4669]: W1210 15:36:51.341638 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7873e2f3_ba85_4e59_8866_dab32f5604c4.slice/crio-2da7eee15074648dad02039044857afafac7374873a616d71f3b762166fd8c39 WatchSource:0}: Error finding container 2da7eee15074648dad02039044857afafac7374873a616d71f3b762166fd8c39: Status 404 returned error can't find the container with id 2da7eee15074648dad02039044857afafac7374873a616d71f3b762166fd8c39 Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.347954 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/970268b9-aefe-4481-a415-94b74a1de83c-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.347998 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970268b9-aefe-4481-a415-94b74a1de83c-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.348029 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/970268b9-aefe-4481-a415-94b74a1de83c-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.348062 4669 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.348090 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/970268b9-aefe-4481-a415-94b74a1de83c-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.348126 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/970268b9-aefe-4481-a415-94b74a1de83c-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.348149 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/970268b9-aefe-4481-a415-94b74a1de83c-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.348171 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rnjlx\" (UniqueName: \"kubernetes.io/projected/970268b9-aefe-4481-a415-94b74a1de83c-kube-api-access-rnjlx\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.449883 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/970268b9-aefe-4481-a415-94b74a1de83c-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.449941 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/970268b9-aefe-4481-a415-94b74a1de83c-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.449967 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/970268b9-aefe-4481-a415-94b74a1de83c-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.449997 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.450026 4669 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/970268b9-aefe-4481-a415-94b74a1de83c-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.450071 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/970268b9-aefe-4481-a415-94b74a1de83c-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.450096 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/970268b9-aefe-4481-a415-94b74a1de83c-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.450117 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rnjlx\" (UniqueName: \"kubernetes.io/projected/970268b9-aefe-4481-a415-94b74a1de83c-kube-api-access-rnjlx\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.450718 4669 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.450735 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/970268b9-aefe-4481-a415-94b74a1de83c-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.450777 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/970268b9-aefe-4481-a415-94b74a1de83c-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.451847 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/970268b9-aefe-4481-a415-94b74a1de83c-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.452551 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/970268b9-aefe-4481-a415-94b74a1de83c-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.480899 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/970268b9-aefe-4481-a415-94b74a1de83c-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.485579 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.486945 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rnjlx\" (UniqueName: \"kubernetes.io/projected/970268b9-aefe-4481-a415-94b74a1de83c-kube-api-access-rnjlx\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.500294 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/970268b9-aefe-4481-a415-94b74a1de83c-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"970268b9-aefe-4481-a415-94b74a1de83c\") " pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.605983 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.608590 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.608980 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.615606 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.615787 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.615917 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-9kpsq" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.640959 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.655443 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/66f18088-3aa3-4dfc-b85b-9289f133a199-memcached-tls-certs\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.655506 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f18088-3aa3-4dfc-b85b-9289f133a199-combined-ca-bundle\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.655546 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/66f18088-3aa3-4dfc-b85b-9289f133a199-kolla-config\") pod \"memcached-0\" (UID: 
\"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.655577 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66f18088-3aa3-4dfc-b85b-9289f133a199-config-data\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.655594 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2n595\" (UniqueName: \"kubernetes.io/projected/66f18088-3aa3-4dfc-b85b-9289f133a199-kube-api-access-2n595\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.757498 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/66f18088-3aa3-4dfc-b85b-9289f133a199-combined-ca-bundle\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.757597 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/66f18088-3aa3-4dfc-b85b-9289f133a199-kolla-config\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.757660 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66f18088-3aa3-4dfc-b85b-9289f133a199-config-data\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.757683 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2n595\" (UniqueName: \"kubernetes.io/projected/66f18088-3aa3-4dfc-b85b-9289f133a199-kube-api-access-2n595\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.757812 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/66f18088-3aa3-4dfc-b85b-9289f133a199-memcached-tls-certs\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.759520 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/66f18088-3aa3-4dfc-b85b-9289f133a199-config-data\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.759963 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/66f18088-3aa3-4dfc-b85b-9289f133a199-kolla-config\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.788205 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/66f18088-3aa3-4dfc-b85b-9289f133a199-combined-ca-bundle\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.789503 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/66f18088-3aa3-4dfc-b85b-9289f133a199-memcached-tls-certs\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.802295 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2n595\" (UniqueName: \"kubernetes.io/projected/66f18088-3aa3-4dfc-b85b-9289f133a199-kube-api-access-2n595\") pod \"memcached-0\" (UID: \"66f18088-3aa3-4dfc-b85b-9289f133a199\") " pod="openstack/memcached-0" Dec 10 15:36:51 crc kubenswrapper[4669]: I1210 15:36:51.982676 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 10 15:36:52 crc kubenswrapper[4669]: I1210 15:36:52.213031 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7873e2f3-ba85-4e59-8866-dab32f5604c4","Type":"ContainerStarted","Data":"2da7eee15074648dad02039044857afafac7374873a616d71f3b762166fd8c39"} Dec 10 15:36:52 crc kubenswrapper[4669]: I1210 15:36:52.669645 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.013174 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.228250 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"970268b9-aefe-4481-a415-94b74a1de83c","Type":"ContainerStarted","Data":"e46a840832979a78d0d6eedd00193329cded7b7cd7e58742f5fd262e9008c3ae"} Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.511257 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.512113 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.514496 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-xc2sn" Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.523804 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p5d9f\" (UniqueName: \"kubernetes.io/projected/c20308b7-707d-45bc-bda1-b33edf9e2d09-kube-api-access-p5d9f\") pod \"kube-state-metrics-0\" (UID: \"c20308b7-707d-45bc-bda1-b33edf9e2d09\") " pod="openstack/kube-state-metrics-0" Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.524954 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.625154 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p5d9f\" (UniqueName: \"kubernetes.io/projected/c20308b7-707d-45bc-bda1-b33edf9e2d09-kube-api-access-p5d9f\") pod \"kube-state-metrics-0\" (UID: \"c20308b7-707d-45bc-bda1-b33edf9e2d09\") " pod="openstack/kube-state-metrics-0" Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.665407 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p5d9f\" (UniqueName: \"kubernetes.io/projected/c20308b7-707d-45bc-bda1-b33edf9e2d09-kube-api-access-p5d9f\") pod \"kube-state-metrics-0\" (UID: \"c20308b7-707d-45bc-bda1-b33edf9e2d09\") " pod="openstack/kube-state-metrics-0" Dec 10 15:36:53 crc kubenswrapper[4669]: I1210 15:36:53.843599 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.707567 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.709347 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.718566 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.718751 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 10 15:36:57 crc kubenswrapper[4669]: W1210 15:36:57.718873 4669 reflector.go:561] object-"openstack"/"cert-ovndbcluster-nb-ovndbs": failed to list *v1.Secret: secrets "cert-ovndbcluster-nb-ovndbs" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 10 15:36:57 crc kubenswrapper[4669]: E1210 15:36:57.718905 4669 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"cert-ovndbcluster-nb-ovndbs\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cert-ovndbcluster-nb-ovndbs\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.718950 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-49bj6" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.719073 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.731789 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.829162 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8d18be2e-49c2-413b-87d3-c76d505b482b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.829872 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.829938 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d18be2e-49c2-413b-87d3-c76d505b482b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.829964 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.829989 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d18be2e-49c2-413b-87d3-c76d505b482b-config\") pod 
\"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.830013 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.830043 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27dqc\" (UniqueName: \"kubernetes.io/projected/8d18be2e-49c2-413b-87d3-c76d505b482b-kube-api-access-27dqc\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.830144 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.904956 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-j9nmw"] Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.905894 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.912352 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.912621 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-vll6d" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.913621 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.934537 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j9nmw"] Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.937665 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.937748 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8d18be2e-49c2-413b-87d3-c76d505b482b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.937800 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.937872 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/8d18be2e-49c2-413b-87d3-c76d505b482b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.937900 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.937929 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d18be2e-49c2-413b-87d3-c76d505b482b-config\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.937955 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.937987 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27dqc\" (UniqueName: \"kubernetes.io/projected/8d18be2e-49c2-413b-87d3-c76d505b482b-kube-api-access-27dqc\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.939032 4669 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.941631 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/8d18be2e-49c2-413b-87d3-c76d505b482b-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.941698 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8d18be2e-49c2-413b-87d3-c76d505b482b-config\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.942232 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8d18be2e-49c2-413b-87d3-c76d505b482b-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.955943 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.970241 4669 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/ovn-controller-ovs-bbqvq"] Dec 10 15:36:57 crc kubenswrapper[4669]: I1210 15:36:57.984465 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.000430 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.005362 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27dqc\" (UniqueName: \"kubernetes.io/projected/8d18be2e-49c2-413b-87d3-c76d505b482b-kube-api-access-27dqc\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.041094 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f79f439d-6ac0-4ebc-8ac8-1023ec207254-combined-ca-bundle\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.041168 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/f79f439d-6ac0-4ebc-8ac8-1023ec207254-ovn-controller-tls-certs\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.041227 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f79f439d-6ac0-4ebc-8ac8-1023ec207254-scripts\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.041252 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f79f439d-6ac0-4ebc-8ac8-1023ec207254-var-run\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.041303 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f79f439d-6ac0-4ebc-8ac8-1023ec207254-var-run-ovn\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.041326 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs8f6\" (UniqueName: \"kubernetes.io/projected/f79f439d-6ac0-4ebc-8ac8-1023ec207254-kube-api-access-gs8f6\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.041360 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/f79f439d-6ac0-4ebc-8ac8-1023ec207254-var-log-ovn\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.048479 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.088680 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bbqvq"] Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.142947 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-var-log\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.143002 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-var-run\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.143031 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f79f439d-6ac0-4ebc-8ac8-1023ec207254-scripts\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.143056 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f79f439d-6ac0-4ebc-8ac8-1023ec207254-var-run\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.143075 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-etc-ovs\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.143106 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f79f439d-6ac0-4ebc-8ac8-1023ec207254-var-run-ovn\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.143124 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-scripts\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.143143 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs8f6\" (UniqueName: 
\"kubernetes.io/projected/f79f439d-6ac0-4ebc-8ac8-1023ec207254-kube-api-access-gs8f6\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.143176 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f79f439d-6ac0-4ebc-8ac8-1023ec207254-var-log-ovn\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.143722 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/f79f439d-6ac0-4ebc-8ac8-1023ec207254-var-log-ovn\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.145474 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/f79f439d-6ac0-4ebc-8ac8-1023ec207254-scripts\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.145598 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/f79f439d-6ac0-4ebc-8ac8-1023ec207254-var-run\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.145681 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/f79f439d-6ac0-4ebc-8ac8-1023ec207254-var-run-ovn\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.145952 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-289hq\" (UniqueName: \"kubernetes.io/projected/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-kube-api-access-289hq\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.145986 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f79f439d-6ac0-4ebc-8ac8-1023ec207254-combined-ca-bundle\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.146012 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-var-lib\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.146067 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/f79f439d-6ac0-4ebc-8ac8-1023ec207254-ovn-controller-tls-certs\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " 
pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.161724 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/f79f439d-6ac0-4ebc-8ac8-1023ec207254-ovn-controller-tls-certs\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.185971 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs8f6\" (UniqueName: \"kubernetes.io/projected/f79f439d-6ac0-4ebc-8ac8-1023ec207254-kube-api-access-gs8f6\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.186103 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f79f439d-6ac0-4ebc-8ac8-1023ec207254-combined-ca-bundle\") pod \"ovn-controller-j9nmw\" (UID: \"f79f439d-6ac0-4ebc-8ac8-1023ec207254\") " pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.237323 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.247913 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-289hq\" (UniqueName: \"kubernetes.io/projected/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-kube-api-access-289hq\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.247964 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-var-lib\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.248008 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-var-log\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.248033 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-var-run\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.248064 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-etc-ovs\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.248094 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-scripts\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " 
pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.248265 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-var-lib\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.248342 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-var-run\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.248418 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-var-log\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.248572 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-etc-ovs\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.250122 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-scripts\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.271380 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-289hq\" (UniqueName: \"kubernetes.io/projected/c64b1bab-fa16-4b15-b9f7-7d821c4a8059-kube-api-access-289hq\") pod \"ovn-controller-ovs-bbqvq\" (UID: \"c64b1bab-fa16-4b15-b9f7-7d821c4a8059\") " pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.442926 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.745933 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:36:58 crc kubenswrapper[4669]: I1210 15:36:58.746042 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:36:58 crc kubenswrapper[4669]: E1210 15:36:58.945807 4669 secret.go:188] Couldn't get secret openstack/cert-ovndbcluster-nb-ovndbs: failed to sync secret cache: timed out waiting for the condition Dec 10 15:36:58 crc kubenswrapper[4669]: E1210 15:36:58.946193 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-ovsdbserver-nb-tls-certs podName:8d18be2e-49c2-413b-87d3-c76d505b482b nodeName:}" failed. No retries permitted until 2025-12-10 15:36:59.446157032 +0000 UTC m=+993.363103659 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "ovsdbserver-nb-tls-certs" (UniqueName: "kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-ovsdbserver-nb-tls-certs") pod "ovsdbserver-nb-0" (UID: "8d18be2e-49c2-413b-87d3-c76d505b482b") : failed to sync secret cache: timed out waiting for the condition Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.138701 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.488556 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.531796 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/8d18be2e-49c2-413b-87d3-c76d505b482b-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"8d18be2e-49c2-413b-87d3-c76d505b482b\") " pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.539159 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.901924 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.904710 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.908955 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.909406 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.909824 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-xtqs4" Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.910022 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 10 15:36:59 crc kubenswrapper[4669]: I1210 15:36:59.914078 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.108683 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7s8vh\" (UniqueName: \"kubernetes.io/projected/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-kube-api-access-7s8vh\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.108737 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.108797 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.108839 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.108856 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.108873 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-config\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.108901 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " 
pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.108920 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.209934 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.209988 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.210020 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-config\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.210054 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.210085 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.210143 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7s8vh\" (UniqueName: \"kubernetes.io/projected/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-kube-api-access-7s8vh\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.210180 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.210277 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.210804 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.211042 4669 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.211641 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-config\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.212073 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.217415 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.230365 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7s8vh\" (UniqueName: \"kubernetes.io/projected/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-kube-api-access-7s8vh\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.234179 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.234802 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.236733 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"ovsdbserver-sb-0\" (UID: \"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2\") " pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:00 crc kubenswrapper[4669]: I1210 15:37:00.537180 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:05 crc kubenswrapper[4669]: W1210 15:37:05.429528 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod66f18088_3aa3_4dfc_b85b_9289f133a199.slice/crio-8b7dff7fdf64d9cc58517d08edd42e3d67c2b3e8d4ac978d8a8b6f0d83b18314 WatchSource:0}: Error finding container 8b7dff7fdf64d9cc58517d08edd42e3d67c2b3e8d4ac978d8a8b6f0d83b18314: Status 404 returned error can't find the container with id 8b7dff7fdf64d9cc58517d08edd42e3d67c2b3e8d4ac978d8a8b6f0d83b18314 Dec 10 15:37:06 crc kubenswrapper[4669]: I1210 15:37:06.416497 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"66f18088-3aa3-4dfc-b85b-9289f133a199","Type":"ContainerStarted","Data":"8b7dff7fdf64d9cc58517d08edd42e3d67c2b3e8d4ac978d8a8b6f0d83b18314"} Dec 10 15:37:09 crc kubenswrapper[4669]: E1210 15:37:09.429118 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 10 15:37:09 crc kubenswrapper[4669]: E1210 15:37:09.429638 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6fvq6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(b359c954-51b4-401c-a783-f0220d650a4b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:37:09 crc kubenswrapper[4669]: E1210 15:37:09.431113 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="b359c954-51b4-401c-a783-f0220d650a4b" Dec 10 15:37:09 crc kubenswrapper[4669]: E1210 15:37:09.450531 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="b359c954-51b4-401c-a783-f0220d650a4b" Dec 10 15:37:12 crc kubenswrapper[4669]: I1210 15:37:12.115476 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 10 15:37:18 crc kubenswrapper[4669]: I1210 15:37:18.543706 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2","Type":"ContainerStarted","Data":"d61f88afc110831d819b6787f590c924b356e6398cfd37b1773e394ce56951ac"} Dec 10 15:37:19 crc kubenswrapper[4669]: E1210 15:37:19.936633 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 10 15:37:19 crc kubenswrapper[4669]: E1210 15:37:19.940319 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init 
container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5gd8w,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-mnzm4_openstack(76773d12-e582-4a80-bad2-f1d8924c7ce0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:37:19 crc kubenswrapper[4669]: E1210 15:37:19.941455 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" podUID="76773d12-e582-4a80-bad2-f1d8924c7ce0" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:19.999689 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:19.999885 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-grkxr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-zz5x5_openstack(8ebba8c4-fe89-43dd-a8fd-2be0298a5622): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:19.999984 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:20.000063 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-smg2k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-2gn25_openstack(252bdb2d-5492-4583-872d-50dd50e34984): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:20.001451 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" podUID="252bdb2d-5492-4583-872d-50dd50e34984" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:20.001516 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5" podUID="8ebba8c4-fe89-43dd-a8fd-2be0298a5622" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:20.067668 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:20.068007 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-77hgh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-4bqxl_openstack(721f9831-e2ee-4f78-a943-a19a0d6cdeab): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:20.069576 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl" podUID="721f9831-e2ee-4f78-a943-a19a0d6cdeab" Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.486092 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j9nmw"] Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.561908 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7873e2f3-ba85-4e59-8866-dab32f5604c4","Type":"ContainerStarted","Data":"96cc2e42a028217527356749cb91fb553fb30ce36694ac440de54e1559eb29a8"} Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.567494 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j9nmw" event={"ID":"f79f439d-6ac0-4ebc-8ac8-1023ec207254","Type":"ContainerStarted","Data":"28bc986666c4b4139c0bc08a3c5c9c4257c0b86b63685f5e558b40193bc8ba6b"} Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.571124 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"970268b9-aefe-4481-a415-94b74a1de83c","Type":"ContainerStarted","Data":"d22bdf72705aa3d0ae2afa192742cd7a8b2514948611621ec0eb5365766f10ad"} Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.573642 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" 
event={"ID":"66f18088-3aa3-4dfc-b85b-9289f133a199","Type":"ContainerStarted","Data":"8722b1ea568d9bdced453ddae717bbb697cf2f4120e1fd10b14dcc0299b6adc5"} Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.573670 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:20.579494 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" podUID="76773d12-e582-4a80-bad2-f1d8924c7ce0" Dec 10 15:37:20 crc kubenswrapper[4669]: E1210 15:37:20.579765 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" podUID="252bdb2d-5492-4583-872d-50dd50e34984" Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.664520 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.778975 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=15.17386697 podStartE2EDuration="29.778957676s" podCreationTimestamp="2025-12-10 15:36:51 +0000 UTC" firstStartedPulling="2025-12-10 15:37:05.437395577 +0000 UTC m=+999.354342234" lastFinishedPulling="2025-12-10 15:37:20.042486313 +0000 UTC m=+1013.959432940" observedRunningTime="2025-12-10 15:37:20.741806544 +0000 UTC m=+1014.658753161" watchObservedRunningTime="2025-12-10 15:37:20.778957676 +0000 UTC m=+1014.695904303" Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.827207 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 10 15:37:20 crc kubenswrapper[4669]: I1210 15:37:20.903349 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-bbqvq"] Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.219229 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.241290 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.362708 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-dns-svc\") pod \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.362816 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grkxr\" (UniqueName: \"kubernetes.io/projected/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-kube-api-access-grkxr\") pod \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\" (UID: \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\") " Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.362878 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77hgh\" (UniqueName: \"kubernetes.io/projected/721f9831-e2ee-4f78-a943-a19a0d6cdeab-kube-api-access-77hgh\") pod \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.362925 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-config\") pod \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\" (UID: \"721f9831-e2ee-4f78-a943-a19a0d6cdeab\") " Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.363019 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-config\") pod \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\" (UID: \"8ebba8c4-fe89-43dd-a8fd-2be0298a5622\") " Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.367450 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-config" (OuterVolumeSpecName: "config") pod "8ebba8c4-fe89-43dd-a8fd-2be0298a5622" (UID: "8ebba8c4-fe89-43dd-a8fd-2be0298a5622"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.368301 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "721f9831-e2ee-4f78-a943-a19a0d6cdeab" (UID: "721f9831-e2ee-4f78-a943-a19a0d6cdeab"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.369190 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-config" (OuterVolumeSpecName: "config") pod "721f9831-e2ee-4f78-a943-a19a0d6cdeab" (UID: "721f9831-e2ee-4f78-a943-a19a0d6cdeab"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.374044 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/721f9831-e2ee-4f78-a943-a19a0d6cdeab-kube-api-access-77hgh" (OuterVolumeSpecName: "kube-api-access-77hgh") pod "721f9831-e2ee-4f78-a943-a19a0d6cdeab" (UID: "721f9831-e2ee-4f78-a943-a19a0d6cdeab"). InnerVolumeSpecName "kube-api-access-77hgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.378182 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-kube-api-access-grkxr" (OuterVolumeSpecName: "kube-api-access-grkxr") pod "8ebba8c4-fe89-43dd-a8fd-2be0298a5622" (UID: "8ebba8c4-fe89-43dd-a8fd-2be0298a5622"). InnerVolumeSpecName "kube-api-access-grkxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.464514 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.464545 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.464556 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grkxr\" (UniqueName: \"kubernetes.io/projected/8ebba8c4-fe89-43dd-a8fd-2be0298a5622-kube-api-access-grkxr\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.464566 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77hgh\" (UniqueName: \"kubernetes.io/projected/721f9831-e2ee-4f78-a943-a19a0d6cdeab-kube-api-access-77hgh\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.464575 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/721f9831-e2ee-4f78-a943-a19a0d6cdeab-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.582141 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl" event={"ID":"721f9831-e2ee-4f78-a943-a19a0d6cdeab","Type":"ContainerDied","Data":"32becb2fd3b6b49fb4678b12285fa7823ef6bd354b336653f7621b17b3865ed7"} Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.582261 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-4bqxl" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.585592 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7714ed30-3730-4a63-8d4d-2b7e097cadbc","Type":"ContainerStarted","Data":"f1101581d3a1607b98d6dbff73d1eae1d9b3c53a86cade61351b25b3c4420773"} Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.588264 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bbqvq" event={"ID":"c64b1bab-fa16-4b15-b9f7-7d821c4a8059","Type":"ContainerStarted","Data":"f185ffeb1c61edebe7fbbc7f7a24b4905e90b370ada270b360194094556b209a"} Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.592293 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"8d18be2e-49c2-413b-87d3-c76d505b482b","Type":"ContainerStarted","Data":"0df6e56dd8ad6f1fcd6dbb65bbdbdd1b74c4a16f59735307895ead9adf7d0073"} Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.593649 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5" event={"ID":"8ebba8c4-fe89-43dd-a8fd-2be0298a5622","Type":"ContainerDied","Data":"dcc58f84d135886f656336316c1611ed84002014e80d2bedcf8e3145fdd356eb"} Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.593704 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-zz5x5" Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.609667 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c20308b7-707d-45bc-bda1-b33edf9e2d09","Type":"ContainerStarted","Data":"f9bd99141a91dbd1585909133cbe7132afb5869623ef65d51c57eb886a76a0d8"} Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.663876 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4bqxl"] Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.679267 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-4bqxl"] Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.712513 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zz5x5"] Dec 10 15:37:21 crc kubenswrapper[4669]: I1210 15:37:21.719099 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-zz5x5"] Dec 10 15:37:22 crc kubenswrapper[4669]: I1210 15:37:22.410483 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="721f9831-e2ee-4f78-a943-a19a0d6cdeab" path="/var/lib/kubelet/pods/721f9831-e2ee-4f78-a943-a19a0d6cdeab/volumes" Dec 10 15:37:22 crc kubenswrapper[4669]: I1210 15:37:22.411076 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ebba8c4-fe89-43dd-a8fd-2be0298a5622" path="/var/lib/kubelet/pods/8ebba8c4-fe89-43dd-a8fd-2be0298a5622/volumes" Dec 10 15:37:24 crc kubenswrapper[4669]: I1210 15:37:24.633227 4669 generic.go:334] "Generic (PLEG): container finished" podID="970268b9-aefe-4481-a415-94b74a1de83c" containerID="d22bdf72705aa3d0ae2afa192742cd7a8b2514948611621ec0eb5365766f10ad" exitCode=0 Dec 10 15:37:24 crc kubenswrapper[4669]: I1210 15:37:24.633267 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"970268b9-aefe-4481-a415-94b74a1de83c","Type":"ContainerDied","Data":"d22bdf72705aa3d0ae2afa192742cd7a8b2514948611621ec0eb5365766f10ad"} Dec 10 15:37:24 crc 
kubenswrapper[4669]: I1210 15:37:24.638031 4669 generic.go:334] "Generic (PLEG): container finished" podID="7873e2f3-ba85-4e59-8866-dab32f5604c4" containerID="96cc2e42a028217527356749cb91fb553fb30ce36694ac440de54e1559eb29a8" exitCode=0 Dec 10 15:37:24 crc kubenswrapper[4669]: I1210 15:37:24.638068 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7873e2f3-ba85-4e59-8866-dab32f5604c4","Type":"ContainerDied","Data":"96cc2e42a028217527356749cb91fb553fb30ce36694ac440de54e1559eb29a8"} Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.654289 4669 generic.go:334] "Generic (PLEG): container finished" podID="c64b1bab-fa16-4b15-b9f7-7d821c4a8059" containerID="fbc6e57c1f0df634a89e543b1db12e5c9565ac440935583c0abac85549409ade" exitCode=0 Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.654399 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bbqvq" event={"ID":"c64b1bab-fa16-4b15-b9f7-7d821c4a8059","Type":"ContainerDied","Data":"fbc6e57c1f0df634a89e543b1db12e5c9565ac440935583c0abac85549409ade"} Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.657082 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j9nmw" event={"ID":"f79f439d-6ac0-4ebc-8ac8-1023ec207254","Type":"ContainerStarted","Data":"f0ab681def3a8c4480587b84bf5282448ba52310907484100a9017905be381e4"} Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.657204 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-j9nmw" Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.661357 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"8d18be2e-49c2-413b-87d3-c76d505b482b","Type":"ContainerStarted","Data":"4dd2a69b99a545ec95c03c87ef320c612001b6adfaea55dd841f36b3f8c57f7f"} Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.663926 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"970268b9-aefe-4481-a415-94b74a1de83c","Type":"ContainerStarted","Data":"434c8703c23995da29e1cee4fa3dc2bcdf878685c0734bf21630ef44f5bb373c"} Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.666082 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c20308b7-707d-45bc-bda1-b33edf9e2d09","Type":"ContainerStarted","Data":"f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f"} Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.666256 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.669555 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2","Type":"ContainerStarted","Data":"ea2fe6692cfa805d82b5981ea171319445bebc0f130d2402717fbc2ef7e07e32"} Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.676835 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7873e2f3-ba85-4e59-8866-dab32f5604c4","Type":"ContainerStarted","Data":"8d0eecf83049a59863e70839a1e855e8a816b38b71f33a1331620856027f5d96"} Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.699813 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-j9nmw" podStartSLOduration=24.380317834 podStartE2EDuration="29.699799594s" podCreationTimestamp="2025-12-10 
15:36:57 +0000 UTC" firstStartedPulling="2025-12-10 15:37:20.505181027 +0000 UTC m=+1014.422127654" lastFinishedPulling="2025-12-10 15:37:25.824662787 +0000 UTC m=+1019.741609414" observedRunningTime="2025-12-10 15:37:26.699424245 +0000 UTC m=+1020.616371002" watchObservedRunningTime="2025-12-10 15:37:26.699799594 +0000 UTC m=+1020.616746221" Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.725805 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=9.33507711 podStartE2EDuration="36.72578395s" podCreationTimestamp="2025-12-10 15:36:50 +0000 UTC" firstStartedPulling="2025-12-10 15:36:52.69473173 +0000 UTC m=+986.611678357" lastFinishedPulling="2025-12-10 15:37:20.08543857 +0000 UTC m=+1014.002385197" observedRunningTime="2025-12-10 15:37:26.718497069 +0000 UTC m=+1020.635443696" watchObservedRunningTime="2025-12-10 15:37:26.72578395 +0000 UTC m=+1020.642730577" Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.741996 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=28.549051065 podStartE2EDuration="33.741979402s" podCreationTimestamp="2025-12-10 15:36:53 +0000 UTC" firstStartedPulling="2025-12-10 15:37:20.742489721 +0000 UTC m=+1014.659436338" lastFinishedPulling="2025-12-10 15:37:25.935418058 +0000 UTC m=+1019.852364675" observedRunningTime="2025-12-10 15:37:26.739709825 +0000 UTC m=+1020.656656452" watchObservedRunningTime="2025-12-10 15:37:26.741979402 +0000 UTC m=+1020.658926029" Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.758896 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=10.216204127 podStartE2EDuration="38.758879952s" podCreationTimestamp="2025-12-10 15:36:48 +0000 UTC" firstStartedPulling="2025-12-10 15:36:51.388448183 +0000 UTC m=+985.305394810" lastFinishedPulling="2025-12-10 15:37:19.931124008 +0000 UTC m=+1013.848070635" observedRunningTime="2025-12-10 15:37:26.753822207 +0000 UTC m=+1020.670768834" watchObservedRunningTime="2025-12-10 15:37:26.758879952 +0000 UTC m=+1020.675826579" Dec 10 15:37:26 crc kubenswrapper[4669]: I1210 15:37:26.984292 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Dec 10 15:37:27 crc kubenswrapper[4669]: I1210 15:37:27.691117 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b359c954-51b4-401c-a783-f0220d650a4b","Type":"ContainerStarted","Data":"f70abc287368477df634eb5a5310c6298fb195497f7e904832ecee389e85ffea"} Dec 10 15:37:27 crc kubenswrapper[4669]: I1210 15:37:27.700948 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bbqvq" event={"ID":"c64b1bab-fa16-4b15-b9f7-7d821c4a8059","Type":"ContainerStarted","Data":"e431adef1fffe3e6ea4cea394293d917fc58502dc4256baa5321f5f11550f676"} Dec 10 15:37:28 crc kubenswrapper[4669]: I1210 15:37:28.745776 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:37:28 crc kubenswrapper[4669]: I1210 15:37:28.746287 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" 
podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:37:28 crc kubenswrapper[4669]: I1210 15:37:28.746370 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:37:28 crc kubenswrapper[4669]: I1210 15:37:28.747683 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"482e52d3d8c64b3e83e8a6d04d8d5d20434b81c087e0b47c0a8e6b34cdbf278e"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:37:28 crc kubenswrapper[4669]: I1210 15:37:28.747822 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://482e52d3d8c64b3e83e8a6d04d8d5d20434b81c087e0b47c0a8e6b34cdbf278e" gracePeriod=600 Dec 10 15:37:30 crc kubenswrapper[4669]: I1210 15:37:30.474314 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Dec 10 15:37:30 crc kubenswrapper[4669]: I1210 15:37:30.475247 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Dec 10 15:37:31 crc kubenswrapper[4669]: I1210 15:37:31.609497 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 10 15:37:31 crc kubenswrapper[4669]: I1210 15:37:31.610630 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 10 15:37:31 crc kubenswrapper[4669]: I1210 15:37:31.753271 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-bbqvq" event={"ID":"c64b1bab-fa16-4b15-b9f7-7d821c4a8059","Type":"ContainerStarted","Data":"b36941f8231d6b528f0bde8ed617251801b7d9cdec4d9eb94221a2ad5a88a655"} Dec 10 15:37:32 crc kubenswrapper[4669]: I1210 15:37:32.764774 4669 generic.go:334] "Generic (PLEG): container finished" podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="482e52d3d8c64b3e83e8a6d04d8d5d20434b81c087e0b47c0a8e6b34cdbf278e" exitCode=0 Dec 10 15:37:32 crc kubenswrapper[4669]: I1210 15:37:32.767671 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"482e52d3d8c64b3e83e8a6d04d8d5d20434b81c087e0b47c0a8e6b34cdbf278e"} Dec 10 15:37:32 crc kubenswrapper[4669]: I1210 15:37:32.767710 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:37:32 crc kubenswrapper[4669]: I1210 15:37:32.767722 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-bbqvq" Dec 10 15:37:32 crc kubenswrapper[4669]: I1210 15:37:32.767740 4669 scope.go:117] "RemoveContainer" containerID="c16924e004007c4e5fe251725834049c68819cdeff3df1d8eef2127a3516ef0e" Dec 10 15:37:33 crc kubenswrapper[4669]: I1210 15:37:33.421069 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-bbqvq" 
podStartSLOduration=31.519630836 podStartE2EDuration="36.421051803s" podCreationTimestamp="2025-12-10 15:36:57 +0000 UTC" firstStartedPulling="2025-12-10 15:37:20.923317702 +0000 UTC m=+1014.840264329" lastFinishedPulling="2025-12-10 15:37:25.824738629 +0000 UTC m=+1019.741685296" observedRunningTime="2025-12-10 15:37:32.79371153 +0000 UTC m=+1026.710658167" watchObservedRunningTime="2025-12-10 15:37:33.421051803 +0000 UTC m=+1027.337998430" Dec 10 15:37:33 crc kubenswrapper[4669]: I1210 15:37:33.850410 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 10 15:37:34 crc kubenswrapper[4669]: I1210 15:37:34.780967 4669 generic.go:334] "Generic (PLEG): container finished" podID="76773d12-e582-4a80-bad2-f1d8924c7ce0" containerID="63487e734e6a2086283e6e869403d35d513c7028c383f40460a8b6fc91a481ac" exitCode=0 Dec 10 15:37:34 crc kubenswrapper[4669]: I1210 15:37:34.781046 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" event={"ID":"76773d12-e582-4a80-bad2-f1d8924c7ce0","Type":"ContainerDied","Data":"63487e734e6a2086283e6e869403d35d513c7028c383f40460a8b6fc91a481ac"} Dec 10 15:37:34 crc kubenswrapper[4669]: I1210 15:37:34.787319 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2","Type":"ContainerStarted","Data":"718b60b05c334571a6f51f93b5c3c8a2c808d544b8d81dd6cc27a855b617b0f5"} Dec 10 15:37:34 crc kubenswrapper[4669]: I1210 15:37:34.790807 4669 generic.go:334] "Generic (PLEG): container finished" podID="252bdb2d-5492-4583-872d-50dd50e34984" containerID="00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de" exitCode=0 Dec 10 15:37:34 crc kubenswrapper[4669]: I1210 15:37:34.790846 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" event={"ID":"252bdb2d-5492-4583-872d-50dd50e34984","Type":"ContainerDied","Data":"00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de"} Dec 10 15:37:34 crc kubenswrapper[4669]: I1210 15:37:34.796073 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"8d18be2e-49c2-413b-87d3-c76d505b482b","Type":"ContainerStarted","Data":"ef7b41d3f3b37ed0459da2bef9a343282d8338b9dc4ffb2794ecdf6c193459a8"} Dec 10 15:37:34 crc kubenswrapper[4669]: I1210 15:37:34.807870 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"706b5365ceb6404033f138d584d9ab3c0d60c4c6dec40f2cfffaa838889f4944"} Dec 10 15:37:34 crc kubenswrapper[4669]: I1210 15:37:34.846577 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=25.630485607 podStartE2EDuration="38.846559381s" podCreationTimestamp="2025-12-10 15:36:56 +0000 UTC" firstStartedPulling="2025-12-10 15:37:20.90513334 +0000 UTC m=+1014.822079967" lastFinishedPulling="2025-12-10 15:37:34.121207104 +0000 UTC m=+1028.038153741" observedRunningTime="2025-12-10 15:37:34.844010589 +0000 UTC m=+1028.760957216" watchObservedRunningTime="2025-12-10 15:37:34.846559381 +0000 UTC m=+1028.763506008" Dec 10 15:37:34 crc kubenswrapper[4669]: I1210 15:37:34.930807 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=21.120703798 podStartE2EDuration="36.930791164s" 
podCreationTimestamp="2025-12-10 15:36:58 +0000 UTC" firstStartedPulling="2025-12-10 15:37:18.311148289 +0000 UTC m=+1012.228094926" lastFinishedPulling="2025-12-10 15:37:34.121235665 +0000 UTC m=+1028.038182292" observedRunningTime="2025-12-10 15:37:34.925128313 +0000 UTC m=+1028.842074950" watchObservedRunningTime="2025-12-10 15:37:34.930791164 +0000 UTC m=+1028.847737791" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.537669 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.540489 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.634093 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.689516 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.759846 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.818652 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" event={"ID":"252bdb2d-5492-4583-872d-50dd50e34984","Type":"ContainerStarted","Data":"c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544"} Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.819541 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.821346 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" event={"ID":"76773d12-e582-4a80-bad2-f1d8924c7ce0","Type":"ContainerStarted","Data":"5a05554ec641c88b4a65c757f84e090dcc9e958d4bc0ef4512fe7b10dea438b9"} Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.821419 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.837152 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" podStartSLOduration=2.806318354 podStartE2EDuration="48.837131126s" podCreationTimestamp="2025-12-10 15:36:47 +0000 UTC" firstStartedPulling="2025-12-10 15:36:48.08870585 +0000 UTC m=+982.005652477" lastFinishedPulling="2025-12-10 15:37:34.119518622 +0000 UTC m=+1028.036465249" observedRunningTime="2025-12-10 15:37:35.835509296 +0000 UTC m=+1029.752455943" watchObservedRunningTime="2025-12-10 15:37:35.837131126 +0000 UTC m=+1029.754077763" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.865481 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" podStartSLOduration=3.579520011 podStartE2EDuration="49.865440369s" podCreationTimestamp="2025-12-10 15:36:46 +0000 UTC" firstStartedPulling="2025-12-10 15:36:47.837352598 +0000 UTC m=+981.754299225" lastFinishedPulling="2025-12-10 15:37:34.123272956 +0000 UTC m=+1028.040219583" observedRunningTime="2025-12-10 15:37:35.856795535 +0000 UTC m=+1029.773742172" watchObservedRunningTime="2025-12-10 15:37:35.865440369 +0000 UTC m=+1029.782387006" Dec 10 15:37:35 crc kubenswrapper[4669]: I1210 15:37:35.902974 4669 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.182841 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-mnzm4"] Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.228053 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-8j42d"] Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.229694 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.231291 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.243408 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-8j42d"] Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.308267 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-z2z75"] Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.317453 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.319456 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q994p\" (UniqueName: \"kubernetes.io/projected/c83e03df-8360-45d3-b6c7-66b70de295db-kube-api-access-q994p\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.319558 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.319701 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-config\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.319744 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.322655 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.338045 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-z2z75"] Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.421589 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: 
\"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.422007 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68sjc\" (UniqueName: \"kubernetes.io/projected/e8e724f9-542f-4eae-96db-60eb825c5de0-kube-api-access-68sjc\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.422048 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-config\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.422073 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.422172 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8e724f9-542f-4eae-96db-60eb825c5de0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.422198 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q994p\" (UniqueName: \"kubernetes.io/projected/c83e03df-8360-45d3-b6c7-66b70de295db-kube-api-access-q994p\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.422254 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e8e724f9-542f-4eae-96db-60eb825c5de0-ovs-rundir\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.422278 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e724f9-542f-4eae-96db-60eb825c5de0-combined-ca-bundle\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.422348 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e8e724f9-542f-4eae-96db-60eb825c5de0-ovn-rundir\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.422402 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e8e724f9-542f-4eae-96db-60eb825c5de0-config\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.423328 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-config\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.423373 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.423480 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.449396 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q994p\" (UniqueName: \"kubernetes.io/projected/c83e03df-8360-45d3-b6c7-66b70de295db-kube-api-access-q994p\") pod \"dnsmasq-dns-5bf47b49b7-8j42d\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.523349 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68sjc\" (UniqueName: \"kubernetes.io/projected/e8e724f9-542f-4eae-96db-60eb825c5de0-kube-api-access-68sjc\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.523415 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8e724f9-542f-4eae-96db-60eb825c5de0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.523460 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e8e724f9-542f-4eae-96db-60eb825c5de0-ovs-rundir\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.523776 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e8e724f9-542f-4eae-96db-60eb825c5de0-ovs-rundir\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.523478 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e724f9-542f-4eae-96db-60eb825c5de0-combined-ca-bundle\") pod 
\"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.524363 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e8e724f9-542f-4eae-96db-60eb825c5de0-ovn-rundir\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.524400 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8e724f9-542f-4eae-96db-60eb825c5de0-config\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.524490 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e8e724f9-542f-4eae-96db-60eb825c5de0-ovn-rundir\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.524965 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8e724f9-542f-4eae-96db-60eb825c5de0-config\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.527709 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8e724f9-542f-4eae-96db-60eb825c5de0-combined-ca-bundle\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.538033 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.539822 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e8e724f9-542f-4eae-96db-60eb825c5de0-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.544531 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68sjc\" (UniqueName: \"kubernetes.io/projected/e8e724f9-542f-4eae-96db-60eb825c5de0-kube-api-access-68sjc\") pod \"ovn-controller-metrics-z2z75\" (UID: \"e8e724f9-542f-4eae-96db-60eb825c5de0\") " pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.544942 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.639021 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.640907 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-z2z75" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.728294 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2gn25"] Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.813609 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.833983 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-hrrzd"] Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.835182 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.839038 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" podUID="76773d12-e582-4a80-bad2-f1d8924c7ce0" containerName="dnsmasq-dns" containerID="cri-o://5a05554ec641c88b4a65c757f84e090dcc9e958d4bc0ef4512fe7b10dea438b9" gracePeriod=10 Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.840775 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.853386 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.855386 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-hrrzd"] Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.953688 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.955764 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.955956 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-config\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.956030 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9txd\" (UniqueName: \"kubernetes.io/projected/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-kube-api-access-q9txd\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.956132 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:36 crc kubenswrapper[4669]: I1210 15:37:36.956243 4669 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-dns-svc\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.035868 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.058672 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.058768 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-config\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.058810 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9txd\" (UniqueName: \"kubernetes.io/projected/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-kube-api-access-q9txd\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.058870 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.058912 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-dns-svc\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.060033 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-dns-svc\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.060310 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-config\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.060929 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: 
I1210 15:37:37.061347 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.087201 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9txd\" (UniqueName: \"kubernetes.io/projected/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-kube-api-access-q9txd\") pod \"dnsmasq-dns-8554648995-hrrzd\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.183266 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.212570 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-z2z75"] Dec 10 15:37:37 crc kubenswrapper[4669]: W1210 15:37:37.225325 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8e724f9_542f_4eae_96db_60eb825c5de0.slice/crio-abcaf893d4a790f58129b65d8f9ea996f044bd4b1eddc9d310026192a336ad3a WatchSource:0}: Error finding container abcaf893d4a790f58129b65d8f9ea996f044bd4b1eddc9d310026192a336ad3a: Status 404 returned error can't find the container with id abcaf893d4a790f58129b65d8f9ea996f044bd4b1eddc9d310026192a336ad3a Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.349622 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-cc10-account-create-update-f46ph"] Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.350710 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.357389 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-cc10-account-create-update-f46ph"] Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.373657 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.414881 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-8j42d"] Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.446278 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.452098 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.456457 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.462566 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-dx7gl" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.464653 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.465876 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.477432 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-operator-scripts\") pod \"glance-cc10-account-create-update-f46ph\" (UID: \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\") " pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.477772 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69zz8\" (UniqueName: \"kubernetes.io/projected/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-kube-api-access-69zz8\") pod \"glance-cc10-account-create-update-f46ph\" (UID: \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\") " pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.511480 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.578725 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-operator-scripts\") pod \"glance-cc10-account-create-update-f46ph\" (UID: \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\") " pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.578931 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69zz8\" (UniqueName: \"kubernetes.io/projected/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-kube-api-access-69zz8\") pod \"glance-cc10-account-create-update-f46ph\" (UID: \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\") " pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.579025 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af1a1d1a-d7b1-4841-ab3c-f643db33079b-scripts\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.579130 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/af1a1d1a-d7b1-4841-ab3c-f643db33079b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.579241 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69lq7\" (UniqueName: 
\"kubernetes.io/projected/af1a1d1a-d7b1-4841-ab3c-f643db33079b-kube-api-access-69lq7\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.579378 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/af1a1d1a-d7b1-4841-ab3c-f643db33079b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.579496 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/af1a1d1a-d7b1-4841-ab3c-f643db33079b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.579620 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af1a1d1a-d7b1-4841-ab3c-f643db33079b-config\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.579701 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af1a1d1a-d7b1-4841-ab3c-f643db33079b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.594561 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-operator-scripts\") pod \"glance-cc10-account-create-update-f46ph\" (UID: \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\") " pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.604039 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69zz8\" (UniqueName: \"kubernetes.io/projected/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-kube-api-access-69zz8\") pod \"glance-cc10-account-create-update-f46ph\" (UID: \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\") " pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.685527 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/af1a1d1a-d7b1-4841-ab3c-f643db33079b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.685599 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/af1a1d1a-d7b1-4841-ab3c-f643db33079b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.685638 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af1a1d1a-d7b1-4841-ab3c-f643db33079b-config\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " 
pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.685652 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af1a1d1a-d7b1-4841-ab3c-f643db33079b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.685699 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af1a1d1a-d7b1-4841-ab3c-f643db33079b-scripts\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.685720 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/af1a1d1a-d7b1-4841-ab3c-f643db33079b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.685742 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69lq7\" (UniqueName: \"kubernetes.io/projected/af1a1d1a-d7b1-4841-ab3c-f643db33079b-kube-api-access-69lq7\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.690638 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.692094 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/af1a1d1a-d7b1-4841-ab3c-f643db33079b-scripts\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.692428 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/af1a1d1a-d7b1-4841-ab3c-f643db33079b-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.693254 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af1a1d1a-d7b1-4841-ab3c-f643db33079b-config\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.707327 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/af1a1d1a-d7b1-4841-ab3c-f643db33079b-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.718093 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/af1a1d1a-d7b1-4841-ab3c-f643db33079b-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.720791 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/af1a1d1a-d7b1-4841-ab3c-f643db33079b-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.742409 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69lq7\" (UniqueName: \"kubernetes.io/projected/af1a1d1a-d7b1-4841-ab3c-f643db33079b-kube-api-access-69lq7\") pod \"ovn-northd-0\" (UID: \"af1a1d1a-d7b1-4841-ab3c-f643db33079b\") " pod="openstack/ovn-northd-0" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.880820 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-z2z75" event={"ID":"e8e724f9-542f-4eae-96db-60eb825c5de0","Type":"ContainerStarted","Data":"fdceb2fa8f4655b9139fa886e33f0bd066345c17b3d51bba9f1a8273637b6fe7"} Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.880860 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-z2z75" event={"ID":"e8e724f9-542f-4eae-96db-60eb825c5de0","Type":"ContainerStarted","Data":"abcaf893d4a790f58129b65d8f9ea996f044bd4b1eddc9d310026192a336ad3a"} Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.889744 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-hrrzd"] Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.898085 4669 generic.go:334] "Generic (PLEG): container finished" podID="76773d12-e582-4a80-bad2-f1d8924c7ce0" containerID="5a05554ec641c88b4a65c757f84e090dcc9e958d4bc0ef4512fe7b10dea438b9" exitCode=0 Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.898145 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" event={"ID":"76773d12-e582-4a80-bad2-f1d8924c7ce0","Type":"ContainerDied","Data":"5a05554ec641c88b4a65c757f84e090dcc9e958d4bc0ef4512fe7b10dea438b9"} Dec 10 15:37:37 crc kubenswrapper[4669]: W1210 15:37:37.915751 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode80eb23e_ed94_41a4_ba96_ae286b6b2bca.slice/crio-ef5d8998a206118bc7b07a7f1d44dc49124a25fe8e5e363c52d357a2f2cc1735 WatchSource:0}: Error finding container ef5d8998a206118bc7b07a7f1d44dc49124a25fe8e5e363c52d357a2f2cc1735: Status 404 returned error can't find the container with id ef5d8998a206118bc7b07a7f1d44dc49124a25fe8e5e363c52d357a2f2cc1735 Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.917329 4669 generic.go:334] "Generic (PLEG): container finished" podID="c83e03df-8360-45d3-b6c7-66b70de295db" containerID="befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee" exitCode=0 Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.918751 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" event={"ID":"c83e03df-8360-45d3-b6c7-66b70de295db","Type":"ContainerDied","Data":"befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee"} Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.918782 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" event={"ID":"c83e03df-8360-45d3-b6c7-66b70de295db","Type":"ContainerStarted","Data":"8f383cebf60fd70821463d37819fa6ea5588a2c41d7809392a7ddcaafd509cec"} Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.919613 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" 
podUID="252bdb2d-5492-4583-872d-50dd50e34984" containerName="dnsmasq-dns" containerID="cri-o://c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544" gracePeriod=10 Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.935593 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-z2z75" podStartSLOduration=1.93557689 podStartE2EDuration="1.93557689s" podCreationTimestamp="2025-12-10 15:37:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:37:37.933228962 +0000 UTC m=+1031.850175589" watchObservedRunningTime="2025-12-10 15:37:37.93557689 +0000 UTC m=+1031.852523517" Dec 10 15:37:37 crc kubenswrapper[4669]: I1210 15:37:37.949100 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.156273 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.298793 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gd8w\" (UniqueName: \"kubernetes.io/projected/76773d12-e582-4a80-bad2-f1d8924c7ce0-kube-api-access-5gd8w\") pod \"76773d12-e582-4a80-bad2-f1d8924c7ce0\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.299143 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-dns-svc\") pod \"76773d12-e582-4a80-bad2-f1d8924c7ce0\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.299286 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-config\") pod \"76773d12-e582-4a80-bad2-f1d8924c7ce0\" (UID: \"76773d12-e582-4a80-bad2-f1d8924c7ce0\") " Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.305140 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76773d12-e582-4a80-bad2-f1d8924c7ce0-kube-api-access-5gd8w" (OuterVolumeSpecName: "kube-api-access-5gd8w") pod "76773d12-e582-4a80-bad2-f1d8924c7ce0" (UID: "76773d12-e582-4a80-bad2-f1d8924c7ce0"). InnerVolumeSpecName "kube-api-access-5gd8w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.341385 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76773d12-e582-4a80-bad2-f1d8924c7ce0" (UID: "76773d12-e582-4a80-bad2-f1d8924c7ce0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.407469 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gd8w\" (UniqueName: \"kubernetes.io/projected/76773d12-e582-4a80-bad2-f1d8924c7ce0-kube-api-access-5gd8w\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.407495 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.417813 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-cc10-account-create-update-f46ph"] Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.454325 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-config" (OuterVolumeSpecName: "config") pod "76773d12-e582-4a80-bad2-f1d8924c7ce0" (UID: "76773d12-e582-4a80-bad2-f1d8924c7ce0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.509445 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76773d12-e582-4a80-bad2-f1d8924c7ce0-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.532088 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.609643 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.711820 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-dns-svc\") pod \"252bdb2d-5492-4583-872d-50dd50e34984\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.711962 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smg2k\" (UniqueName: \"kubernetes.io/projected/252bdb2d-5492-4583-872d-50dd50e34984-kube-api-access-smg2k\") pod \"252bdb2d-5492-4583-872d-50dd50e34984\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.711993 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-config\") pod \"252bdb2d-5492-4583-872d-50dd50e34984\" (UID: \"252bdb2d-5492-4583-872d-50dd50e34984\") " Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.734032 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/252bdb2d-5492-4583-872d-50dd50e34984-kube-api-access-smg2k" (OuterVolumeSpecName: "kube-api-access-smg2k") pod "252bdb2d-5492-4583-872d-50dd50e34984" (UID: "252bdb2d-5492-4583-872d-50dd50e34984"). InnerVolumeSpecName "kube-api-access-smg2k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.783926 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-config" (OuterVolumeSpecName: "config") pod "252bdb2d-5492-4583-872d-50dd50e34984" (UID: "252bdb2d-5492-4583-872d-50dd50e34984"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.789652 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "252bdb2d-5492-4583-872d-50dd50e34984" (UID: "252bdb2d-5492-4583-872d-50dd50e34984"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.814579 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.814620 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smg2k\" (UniqueName: \"kubernetes.io/projected/252bdb2d-5492-4583-872d-50dd50e34984-kube-api-access-smg2k\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.814634 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/252bdb2d-5492-4583-872d-50dd50e34984-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.927942 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" event={"ID":"76773d12-e582-4a80-bad2-f1d8924c7ce0","Type":"ContainerDied","Data":"b0b817f1ae49ffd038372786f5494b7a0711c6bd831be8d0fda8c0932399908e"} Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.927987 4669 scope.go:117] "RemoveContainer" containerID="5a05554ec641c88b4a65c757f84e090dcc9e958d4bc0ef4512fe7b10dea438b9" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.928093 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-mnzm4" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.930487 4669 generic.go:334] "Generic (PLEG): container finished" podID="e80eb23e-ed94-41a4-ba96-ae286b6b2bca" containerID="ccf41bac5a763b89aea2c4e697418c1b1a20f12a4fe884d831bfa2aa10c178f3" exitCode=0 Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.930545 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-hrrzd" event={"ID":"e80eb23e-ed94-41a4-ba96-ae286b6b2bca","Type":"ContainerDied","Data":"ccf41bac5a763b89aea2c4e697418c1b1a20f12a4fe884d831bfa2aa10c178f3"} Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.930572 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-hrrzd" event={"ID":"e80eb23e-ed94-41a4-ba96-ae286b6b2bca","Type":"ContainerStarted","Data":"ef5d8998a206118bc7b07a7f1d44dc49124a25fe8e5e363c52d357a2f2cc1735"} Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.934981 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-cc10-account-create-update-f46ph" event={"ID":"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8","Type":"ContainerStarted","Data":"39c338317eb1c5903a455654e5c02cf5cda10309cd7b5c09ba036aa7ebe9b4a1"} Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.935031 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-cc10-account-create-update-f46ph" event={"ID":"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8","Type":"ContainerStarted","Data":"f3ef925d0f7cd08b32f420c1a5cf1f9b03999029fa7077c181bc19d32a470fb0"} Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.937829 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"af1a1d1a-d7b1-4841-ab3c-f643db33079b","Type":"ContainerStarted","Data":"ae33e4eba0924d09f9110e24265955d915216b2089978ecd2c31165cd926202c"} Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.948130 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" event={"ID":"c83e03df-8360-45d3-b6c7-66b70de295db","Type":"ContainerStarted","Data":"7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432"} Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.948464 4669 scope.go:117] "RemoveContainer" containerID="63487e734e6a2086283e6e869403d35d513c7028c383f40460a8b6fc91a481ac" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.948648 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.971254 4669 generic.go:334] "Generic (PLEG): container finished" podID="252bdb2d-5492-4583-872d-50dd50e34984" containerID="c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544" exitCode=0 Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.972044 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.972379 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" event={"ID":"252bdb2d-5492-4583-872d-50dd50e34984","Type":"ContainerDied","Data":"c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544"} Dec 10 15:37:38 crc kubenswrapper[4669]: I1210 15:37:38.972524 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-2gn25" event={"ID":"252bdb2d-5492-4583-872d-50dd50e34984","Type":"ContainerDied","Data":"029fff73296eefd642482c2d87b4c69b75ec7d3d7a1993a26fc31d24ed022d52"} Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.006534 4669 scope.go:117] "RemoveContainer" containerID="c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544" Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.009768 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-cc10-account-create-update-f46ph" podStartSLOduration=2.009756902 podStartE2EDuration="2.009756902s" podCreationTimestamp="2025-12-10 15:37:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:37:38.965736888 +0000 UTC m=+1032.882683535" watchObservedRunningTime="2025-12-10 15:37:39.009756902 +0000 UTC m=+1032.926703529" Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.016926 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" podStartSLOduration=3.0169111 podStartE2EDuration="3.0169111s" podCreationTimestamp="2025-12-10 15:37:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:37:38.984808662 +0000 UTC m=+1032.901755309" watchObservedRunningTime="2025-12-10 15:37:39.0169111 +0000 UTC m=+1032.933857727" Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.024754 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-mnzm4"] Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.031463 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-mnzm4"] Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.052691 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2gn25"] Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.063936 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-2gn25"] Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.064438 4669 scope.go:117] "RemoveContainer" containerID="00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de" Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.093563 4669 scope.go:117] "RemoveContainer" containerID="c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544" Dec 10 15:37:39 crc kubenswrapper[4669]: E1210 15:37:39.094024 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544\": container with ID starting with c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544 not found: ID does not exist" containerID="c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544" Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 
15:37:39.094063 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544"} err="failed to get container status \"c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544\": rpc error: code = NotFound desc = could not find container \"c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544\": container with ID starting with c0967dc6d813f4cc1209957de893047195867331f6eaa9a851b6a35194437544 not found: ID does not exist" Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.094092 4669 scope.go:117] "RemoveContainer" containerID="00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de" Dec 10 15:37:39 crc kubenswrapper[4669]: E1210 15:37:39.094439 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de\": container with ID starting with 00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de not found: ID does not exist" containerID="00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de" Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.094471 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de"} err="failed to get container status \"00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de\": rpc error: code = NotFound desc = could not find container \"00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de\": container with ID starting with 00d618f68b87474980f829421d79cc7b1eabaaada2b3351a3e5a55774ba8c6de not found: ID does not exist" Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.982198 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-hrrzd" event={"ID":"e80eb23e-ed94-41a4-ba96-ae286b6b2bca","Type":"ContainerStarted","Data":"4852aa868ecee3c0c2b23541cc99e3325687508237051e4c1c74a39c2701436b"} Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.982988 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.986712 4669 generic.go:334] "Generic (PLEG): container finished" podID="4541fbbe-63d6-478b-9fc6-90eaaa8f67a8" containerID="39c338317eb1c5903a455654e5c02cf5cda10309cd7b5c09ba036aa7ebe9b4a1" exitCode=0 Dec 10 15:37:39 crc kubenswrapper[4669]: I1210 15:37:39.986925 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-cc10-account-create-update-f46ph" event={"ID":"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8","Type":"ContainerDied","Data":"39c338317eb1c5903a455654e5c02cf5cda10309cd7b5c09ba036aa7ebe9b4a1"} Dec 10 15:37:40 crc kubenswrapper[4669]: I1210 15:37:40.012451 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-hrrzd" podStartSLOduration=4.012433588 podStartE2EDuration="4.012433588s" podCreationTimestamp="2025-12-10 15:37:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:37:40.004909831 +0000 UTC m=+1033.921856478" watchObservedRunningTime="2025-12-10 15:37:40.012433588 +0000 UTC m=+1033.929380215" Dec 10 15:37:40 crc kubenswrapper[4669]: I1210 15:37:40.411193 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="252bdb2d-5492-4583-872d-50dd50e34984" path="/var/lib/kubelet/pods/252bdb2d-5492-4583-872d-50dd50e34984/volumes" Dec 10 15:37:40 crc kubenswrapper[4669]: I1210 15:37:40.412164 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76773d12-e582-4a80-bad2-f1d8924c7ce0" path="/var/lib/kubelet/pods/76773d12-e582-4a80-bad2-f1d8924c7ce0/volumes" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:40.999718 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"af1a1d1a-d7b1-4841-ab3c-f643db33079b","Type":"ContainerStarted","Data":"293acd67c3f0ea473222dfca55323e894254b57af9017c7d8708a68556a36a56"} Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.000077 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"af1a1d1a-d7b1-4841-ab3c-f643db33079b","Type":"ContainerStarted","Data":"3d2f6f7efb837da7f2044f0eab2e9f1032f01ab77b9c746156118ee36989114a"} Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.029198 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.526845906 podStartE2EDuration="4.029171162s" podCreationTimestamp="2025-12-10 15:37:37 +0000 UTC" firstStartedPulling="2025-12-10 15:37:38.550169447 +0000 UTC m=+1032.467116074" lastFinishedPulling="2025-12-10 15:37:40.052494703 +0000 UTC m=+1033.969441330" observedRunningTime="2025-12-10 15:37:41.024851695 +0000 UTC m=+1034.941798332" watchObservedRunningTime="2025-12-10 15:37:41.029171162 +0000 UTC m=+1034.946117819" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.344074 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.457943 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69zz8\" (UniqueName: \"kubernetes.io/projected/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-kube-api-access-69zz8\") pod \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\" (UID: \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\") " Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.458042 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-operator-scripts\") pod \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\" (UID: \"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8\") " Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.458750 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4541fbbe-63d6-478b-9fc6-90eaaa8f67a8" (UID: "4541fbbe-63d6-478b-9fc6-90eaaa8f67a8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.465455 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-kube-api-access-69zz8" (OuterVolumeSpecName: "kube-api-access-69zz8") pod "4541fbbe-63d6-478b-9fc6-90eaaa8f67a8" (UID: "4541fbbe-63d6-478b-9fc6-90eaaa8f67a8"). InnerVolumeSpecName "kube-api-access-69zz8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.510255 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-d8v5p"] Dec 10 15:37:41 crc kubenswrapper[4669]: E1210 15:37:41.510575 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="252bdb2d-5492-4583-872d-50dd50e34984" containerName="dnsmasq-dns" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.510608 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="252bdb2d-5492-4583-872d-50dd50e34984" containerName="dnsmasq-dns" Dec 10 15:37:41 crc kubenswrapper[4669]: E1210 15:37:41.510621 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="252bdb2d-5492-4583-872d-50dd50e34984" containerName="init" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.510628 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="252bdb2d-5492-4583-872d-50dd50e34984" containerName="init" Dec 10 15:37:41 crc kubenswrapper[4669]: E1210 15:37:41.510642 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4541fbbe-63d6-478b-9fc6-90eaaa8f67a8" containerName="mariadb-account-create-update" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.510650 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="4541fbbe-63d6-478b-9fc6-90eaaa8f67a8" containerName="mariadb-account-create-update" Dec 10 15:37:41 crc kubenswrapper[4669]: E1210 15:37:41.510669 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76773d12-e582-4a80-bad2-f1d8924c7ce0" containerName="init" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.510675 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="76773d12-e582-4a80-bad2-f1d8924c7ce0" containerName="init" Dec 10 15:37:41 crc kubenswrapper[4669]: E1210 15:37:41.510685 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76773d12-e582-4a80-bad2-f1d8924c7ce0" containerName="dnsmasq-dns" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.510692 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="76773d12-e582-4a80-bad2-f1d8924c7ce0" containerName="dnsmasq-dns" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.510849 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="252bdb2d-5492-4583-872d-50dd50e34984" containerName="dnsmasq-dns" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.510863 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="76773d12-e582-4a80-bad2-f1d8924c7ce0" containerName="dnsmasq-dns" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.510875 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="4541fbbe-63d6-478b-9fc6-90eaaa8f67a8" containerName="mariadb-account-create-update" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.511412 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.522256 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-d8v5p"] Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.570863 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69zz8\" (UniqueName: \"kubernetes.io/projected/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-kube-api-access-69zz8\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.571385 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.657331 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6e21-account-create-update-qrl4t"] Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.658305 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.660662 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.673226 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzsvj\" (UniqueName: \"kubernetes.io/projected/c607262e-1448-458e-9135-1581237f17e7-kube-api-access-qzsvj\") pod \"keystone-db-create-d8v5p\" (UID: \"c607262e-1448-458e-9135-1581237f17e7\") " pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.673467 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c607262e-1448-458e-9135-1581237f17e7-operator-scripts\") pod \"keystone-db-create-d8v5p\" (UID: \"c607262e-1448-458e-9135-1581237f17e7\") " pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.677571 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6e21-account-create-update-qrl4t"] Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.774751 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c607262e-1448-458e-9135-1581237f17e7-operator-scripts\") pod \"keystone-db-create-d8v5p\" (UID: \"c607262e-1448-458e-9135-1581237f17e7\") " pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.774825 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082348c7-f4ba-4369-a41e-d633a92ef9ec-operator-scripts\") pod \"keystone-6e21-account-create-update-qrl4t\" (UID: \"082348c7-f4ba-4369-a41e-d633a92ef9ec\") " pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.774878 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hlx6\" (UniqueName: \"kubernetes.io/projected/082348c7-f4ba-4369-a41e-d633a92ef9ec-kube-api-access-2hlx6\") pod \"keystone-6e21-account-create-update-qrl4t\" (UID: \"082348c7-f4ba-4369-a41e-d633a92ef9ec\") " 
pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.774916 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzsvj\" (UniqueName: \"kubernetes.io/projected/c607262e-1448-458e-9135-1581237f17e7-kube-api-access-qzsvj\") pod \"keystone-db-create-d8v5p\" (UID: \"c607262e-1448-458e-9135-1581237f17e7\") " pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.775688 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c607262e-1448-458e-9135-1581237f17e7-operator-scripts\") pod \"keystone-db-create-d8v5p\" (UID: \"c607262e-1448-458e-9135-1581237f17e7\") " pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.799126 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzsvj\" (UniqueName: \"kubernetes.io/projected/c607262e-1448-458e-9135-1581237f17e7-kube-api-access-qzsvj\") pod \"keystone-db-create-d8v5p\" (UID: \"c607262e-1448-458e-9135-1581237f17e7\") " pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.829065 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.876436 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082348c7-f4ba-4369-a41e-d633a92ef9ec-operator-scripts\") pod \"keystone-6e21-account-create-update-qrl4t\" (UID: \"082348c7-f4ba-4369-a41e-d633a92ef9ec\") " pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.876535 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hlx6\" (UniqueName: \"kubernetes.io/projected/082348c7-f4ba-4369-a41e-d633a92ef9ec-kube-api-access-2hlx6\") pod \"keystone-6e21-account-create-update-qrl4t\" (UID: \"082348c7-f4ba-4369-a41e-d633a92ef9ec\") " pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.877755 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082348c7-f4ba-4369-a41e-d633a92ef9ec-operator-scripts\") pod \"keystone-6e21-account-create-update-qrl4t\" (UID: \"082348c7-f4ba-4369-a41e-d633a92ef9ec\") " pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.893079 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hlx6\" (UniqueName: \"kubernetes.io/projected/082348c7-f4ba-4369-a41e-d633a92ef9ec-kube-api-access-2hlx6\") pod \"keystone-6e21-account-create-update-qrl4t\" (UID: \"082348c7-f4ba-4369-a41e-d633a92ef9ec\") " pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.958960 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-nh95v"] Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.960879 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nh95v" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.971528 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:41 crc kubenswrapper[4669]: I1210 15:37:41.982365 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-nh95v"] Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.091366 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxhk2\" (UniqueName: \"kubernetes.io/projected/343b34aa-07cb-45f2-a070-b64466e0d681-kube-api-access-rxhk2\") pod \"placement-db-create-nh95v\" (UID: \"343b34aa-07cb-45f2-a070-b64466e0d681\") " pod="openstack/placement-db-create-nh95v" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.092380 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/343b34aa-07cb-45f2-a070-b64466e0d681-operator-scripts\") pod \"placement-db-create-nh95v\" (UID: \"343b34aa-07cb-45f2-a070-b64466e0d681\") " pod="openstack/placement-db-create-nh95v" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.099194 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-cc10-account-create-update-f46ph" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.099745 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-cc10-account-create-update-f46ph" event={"ID":"4541fbbe-63d6-478b-9fc6-90eaaa8f67a8","Type":"ContainerDied","Data":"f3ef925d0f7cd08b32f420c1a5cf1f9b03999029fa7077c181bc19d32a470fb0"} Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.099789 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3ef925d0f7cd08b32f420c1a5cf1f9b03999029fa7077c181bc19d32a470fb0" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.099810 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.196088 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxhk2\" (UniqueName: \"kubernetes.io/projected/343b34aa-07cb-45f2-a070-b64466e0d681-kube-api-access-rxhk2\") pod \"placement-db-create-nh95v\" (UID: \"343b34aa-07cb-45f2-a070-b64466e0d681\") " pod="openstack/placement-db-create-nh95v" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.196191 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/343b34aa-07cb-45f2-a070-b64466e0d681-operator-scripts\") pod \"placement-db-create-nh95v\" (UID: \"343b34aa-07cb-45f2-a070-b64466e0d681\") " pod="openstack/placement-db-create-nh95v" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.197210 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/343b34aa-07cb-45f2-a070-b64466e0d681-operator-scripts\") pod \"placement-db-create-nh95v\" (UID: \"343b34aa-07cb-45f2-a070-b64466e0d681\") " pod="openstack/placement-db-create-nh95v" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.228028 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxhk2\" (UniqueName: \"kubernetes.io/projected/343b34aa-07cb-45f2-a070-b64466e0d681-kube-api-access-rxhk2\") pod \"placement-db-create-nh95v\" (UID: \"343b34aa-07cb-45f2-a070-b64466e0d681\") " pod="openstack/placement-db-create-nh95v" Dec 10 15:37:42 crc 
kubenswrapper[4669]: I1210 15:37:42.240662 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-beb3-account-create-update-ckz64"] Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.241910 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.244014 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.251876 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-beb3-account-create-update-ckz64"] Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.397555 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nh95v" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.398992 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n22hm\" (UniqueName: \"kubernetes.io/projected/968aa77c-143b-4324-9736-6b9698cc2867-kube-api-access-n22hm\") pod \"placement-beb3-account-create-update-ckz64\" (UID: \"968aa77c-143b-4324-9736-6b9698cc2867\") " pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.399102 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/968aa77c-143b-4324-9736-6b9698cc2867-operator-scripts\") pod \"placement-beb3-account-create-update-ckz64\" (UID: \"968aa77c-143b-4324-9736-6b9698cc2867\") " pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.476717 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-d8v5p"] Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.501896 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n22hm\" (UniqueName: \"kubernetes.io/projected/968aa77c-143b-4324-9736-6b9698cc2867-kube-api-access-n22hm\") pod \"placement-beb3-account-create-update-ckz64\" (UID: \"968aa77c-143b-4324-9736-6b9698cc2867\") " pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.502047 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/968aa77c-143b-4324-9736-6b9698cc2867-operator-scripts\") pod \"placement-beb3-account-create-update-ckz64\" (UID: \"968aa77c-143b-4324-9736-6b9698cc2867\") " pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.502821 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/968aa77c-143b-4324-9736-6b9698cc2867-operator-scripts\") pod \"placement-beb3-account-create-update-ckz64\" (UID: \"968aa77c-143b-4324-9736-6b9698cc2867\") " pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.533232 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n22hm\" (UniqueName: \"kubernetes.io/projected/968aa77c-143b-4324-9736-6b9698cc2867-kube-api-access-n22hm\") pod \"placement-beb3-account-create-update-ckz64\" (UID: \"968aa77c-143b-4324-9736-6b9698cc2867\") " 
pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.581581 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.635517 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6e21-account-create-update-qrl4t"] Dec 10 15:37:42 crc kubenswrapper[4669]: W1210 15:37:42.657252 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod082348c7_f4ba_4369_a41e_d633a92ef9ec.slice/crio-a7668d47af6df8f829de0c9d0bb554a1fbe25df3ca5d20fb4eeb76d8aeebb16e WatchSource:0}: Error finding container a7668d47af6df8f829de0c9d0bb554a1fbe25df3ca5d20fb4eeb76d8aeebb16e: Status 404 returned error can't find the container with id a7668d47af6df8f829de0c9d0bb554a1fbe25df3ca5d20fb4eeb76d8aeebb16e Dec 10 15:37:42 crc kubenswrapper[4669]: I1210 15:37:42.861540 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-nh95v"] Dec 10 15:37:42 crc kubenswrapper[4669]: W1210 15:37:42.866451 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod343b34aa_07cb_45f2_a070_b64466e0d681.slice/crio-f9e33c2e503ca681a942e146a4f97b2e8f001e3408a32a674f82af3604a7c4df WatchSource:0}: Error finding container f9e33c2e503ca681a942e146a4f97b2e8f001e3408a32a674f82af3604a7c4df: Status 404 returned error can't find the container with id f9e33c2e503ca681a942e146a4f97b2e8f001e3408a32a674f82af3604a7c4df Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.037257 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-beb3-account-create-update-ckz64"] Dec 10 15:37:43 crc kubenswrapper[4669]: W1210 15:37:43.038090 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod968aa77c_143b_4324_9736_6b9698cc2867.slice/crio-e0b2fa8529d7b96443417a171878d516616e845dda2c1e0c94c8412f27f3bccb WatchSource:0}: Error finding container e0b2fa8529d7b96443417a171878d516616e845dda2c1e0c94c8412f27f3bccb: Status 404 returned error can't find the container with id e0b2fa8529d7b96443417a171878d516616e845dda2c1e0c94c8412f27f3bccb Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.106567 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-d8v5p" event={"ID":"c607262e-1448-458e-9135-1581237f17e7","Type":"ContainerStarted","Data":"fe7878214509e235c1e469cb35161c6ea9ae7b7be6d0f0cdba7dc381b38f2917"} Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.106612 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-d8v5p" event={"ID":"c607262e-1448-458e-9135-1581237f17e7","Type":"ContainerStarted","Data":"12d2bd7755622731652016be31ba548a4ad9f9ce0ff9799358baecf0ff3f84a3"} Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.108366 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-beb3-account-create-update-ckz64" event={"ID":"968aa77c-143b-4324-9736-6b9698cc2867","Type":"ContainerStarted","Data":"e0b2fa8529d7b96443417a171878d516616e845dda2c1e0c94c8412f27f3bccb"} Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.109338 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nh95v" 
event={"ID":"343b34aa-07cb-45f2-a070-b64466e0d681","Type":"ContainerStarted","Data":"8a5474a6efb65ced282a5247b68d6dc1b8038e2da99eb63e77c694961b1db6e4"} Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.109358 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nh95v" event={"ID":"343b34aa-07cb-45f2-a070-b64466e0d681","Type":"ContainerStarted","Data":"f9e33c2e503ca681a942e146a4f97b2e8f001e3408a32a674f82af3604a7c4df"} Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.112666 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6e21-account-create-update-qrl4t" event={"ID":"082348c7-f4ba-4369-a41e-d633a92ef9ec","Type":"ContainerStarted","Data":"c2585fbec4d72d936e175c42bc682b344d0b008e1a6238d2d861b8d3de1f23f2"} Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.112723 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6e21-account-create-update-qrl4t" event={"ID":"082348c7-f4ba-4369-a41e-d633a92ef9ec","Type":"ContainerStarted","Data":"a7668d47af6df8f829de0c9d0bb554a1fbe25df3ca5d20fb4eeb76d8aeebb16e"} Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.121422 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-d8v5p" podStartSLOduration=2.121407414 podStartE2EDuration="2.121407414s" podCreationTimestamp="2025-12-10 15:37:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:37:43.119458565 +0000 UTC m=+1037.036405192" watchObservedRunningTime="2025-12-10 15:37:43.121407414 +0000 UTC m=+1037.038354041" Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.153631 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6e21-account-create-update-qrl4t" podStartSLOduration=2.1536106090000002 podStartE2EDuration="2.153610609s" podCreationTimestamp="2025-12-10 15:37:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:37:43.133185124 +0000 UTC m=+1037.050131761" watchObservedRunningTime="2025-12-10 15:37:43.153610609 +0000 UTC m=+1037.070557236" Dec 10 15:37:43 crc kubenswrapper[4669]: I1210 15:37:43.166611 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-nh95v" podStartSLOduration=2.16658932 podStartE2EDuration="2.16658932s" podCreationTimestamp="2025-12-10 15:37:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:37:43.156801818 +0000 UTC m=+1037.073748445" watchObservedRunningTime="2025-12-10 15:37:43.16658932 +0000 UTC m=+1037.083535947" Dec 10 15:37:44 crc kubenswrapper[4669]: I1210 15:37:44.269255 4669 generic.go:334] "Generic (PLEG): container finished" podID="343b34aa-07cb-45f2-a070-b64466e0d681" containerID="8a5474a6efb65ced282a5247b68d6dc1b8038e2da99eb63e77c694961b1db6e4" exitCode=0 Dec 10 15:37:44 crc kubenswrapper[4669]: I1210 15:37:44.269733 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nh95v" event={"ID":"343b34aa-07cb-45f2-a070-b64466e0d681","Type":"ContainerDied","Data":"8a5474a6efb65ced282a5247b68d6dc1b8038e2da99eb63e77c694961b1db6e4"} Dec 10 15:37:44 crc kubenswrapper[4669]: I1210 15:37:44.280664 4669 generic.go:334] "Generic (PLEG): container finished" 
podID="c607262e-1448-458e-9135-1581237f17e7" containerID="fe7878214509e235c1e469cb35161c6ea9ae7b7be6d0f0cdba7dc381b38f2917" exitCode=0 Dec 10 15:37:44 crc kubenswrapper[4669]: I1210 15:37:44.280734 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-d8v5p" event={"ID":"c607262e-1448-458e-9135-1581237f17e7","Type":"ContainerDied","Data":"fe7878214509e235c1e469cb35161c6ea9ae7b7be6d0f0cdba7dc381b38f2917"} Dec 10 15:37:44 crc kubenswrapper[4669]: I1210 15:37:44.283185 4669 generic.go:334] "Generic (PLEG): container finished" podID="968aa77c-143b-4324-9736-6b9698cc2867" containerID="98f09f7d8047752cfb8a0155a415e8f73bdb5cc5b16223ff4302de518ef77ef9" exitCode=0 Dec 10 15:37:44 crc kubenswrapper[4669]: I1210 15:37:44.283332 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-beb3-account-create-update-ckz64" event={"ID":"968aa77c-143b-4324-9736-6b9698cc2867","Type":"ContainerDied","Data":"98f09f7d8047752cfb8a0155a415e8f73bdb5cc5b16223ff4302de518ef77ef9"} Dec 10 15:37:44 crc kubenswrapper[4669]: I1210 15:37:44.311751 4669 generic.go:334] "Generic (PLEG): container finished" podID="082348c7-f4ba-4369-a41e-d633a92ef9ec" containerID="c2585fbec4d72d936e175c42bc682b344d0b008e1a6238d2d861b8d3de1f23f2" exitCode=0 Dec 10 15:37:44 crc kubenswrapper[4669]: I1210 15:37:44.311794 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6e21-account-create-update-qrl4t" event={"ID":"082348c7-f4ba-4369-a41e-d633a92ef9ec","Type":"ContainerDied","Data":"c2585fbec4d72d936e175c42bc682b344d0b008e1a6238d2d861b8d3de1f23f2"} Dec 10 15:37:45 crc kubenswrapper[4669]: I1210 15:37:45.767735 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:45 crc kubenswrapper[4669]: I1210 15:37:45.914547 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:45 crc kubenswrapper[4669]: I1210 15:37:45.921819 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:45 crc kubenswrapper[4669]: I1210 15:37:45.935494 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nh95v" Dec 10 15:37:45 crc kubenswrapper[4669]: I1210 15:37:45.967796 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c607262e-1448-458e-9135-1581237f17e7-operator-scripts\") pod \"c607262e-1448-458e-9135-1581237f17e7\" (UID: \"c607262e-1448-458e-9135-1581237f17e7\") " Dec 10 15:37:45 crc kubenswrapper[4669]: I1210 15:37:45.967972 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzsvj\" (UniqueName: \"kubernetes.io/projected/c607262e-1448-458e-9135-1581237f17e7-kube-api-access-qzsvj\") pod \"c607262e-1448-458e-9135-1581237f17e7\" (UID: \"c607262e-1448-458e-9135-1581237f17e7\") " Dec 10 15:37:45 crc kubenswrapper[4669]: I1210 15:37:45.968556 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c607262e-1448-458e-9135-1581237f17e7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c607262e-1448-458e-9135-1581237f17e7" (UID: "c607262e-1448-458e-9135-1581237f17e7"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:45 crc kubenswrapper[4669]: I1210 15:37:45.972594 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c607262e-1448-458e-9135-1581237f17e7-kube-api-access-qzsvj" (OuterVolumeSpecName: "kube-api-access-qzsvj") pod "c607262e-1448-458e-9135-1581237f17e7" (UID: "c607262e-1448-458e-9135-1581237f17e7"). InnerVolumeSpecName "kube-api-access-qzsvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.071788 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n22hm\" (UniqueName: \"kubernetes.io/projected/968aa77c-143b-4324-9736-6b9698cc2867-kube-api-access-n22hm\") pod \"968aa77c-143b-4324-9736-6b9698cc2867\" (UID: \"968aa77c-143b-4324-9736-6b9698cc2867\") " Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.071849 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hlx6\" (UniqueName: \"kubernetes.io/projected/082348c7-f4ba-4369-a41e-d633a92ef9ec-kube-api-access-2hlx6\") pod \"082348c7-f4ba-4369-a41e-d633a92ef9ec\" (UID: \"082348c7-f4ba-4369-a41e-d633a92ef9ec\") " Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.071887 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/968aa77c-143b-4324-9736-6b9698cc2867-operator-scripts\") pod \"968aa77c-143b-4324-9736-6b9698cc2867\" (UID: \"968aa77c-143b-4324-9736-6b9698cc2867\") " Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.072541 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/968aa77c-143b-4324-9736-6b9698cc2867-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "968aa77c-143b-4324-9736-6b9698cc2867" (UID: "968aa77c-143b-4324-9736-6b9698cc2867"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.072615 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxhk2\" (UniqueName: \"kubernetes.io/projected/343b34aa-07cb-45f2-a070-b64466e0d681-kube-api-access-rxhk2\") pod \"343b34aa-07cb-45f2-a070-b64466e0d681\" (UID: \"343b34aa-07cb-45f2-a070-b64466e0d681\") " Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.072690 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/343b34aa-07cb-45f2-a070-b64466e0d681-operator-scripts\") pod \"343b34aa-07cb-45f2-a070-b64466e0d681\" (UID: \"343b34aa-07cb-45f2-a070-b64466e0d681\") " Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.072746 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082348c7-f4ba-4369-a41e-d633a92ef9ec-operator-scripts\") pod \"082348c7-f4ba-4369-a41e-d633a92ef9ec\" (UID: \"082348c7-f4ba-4369-a41e-d633a92ef9ec\") " Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.073027 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/343b34aa-07cb-45f2-a070-b64466e0d681-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "343b34aa-07cb-45f2-a070-b64466e0d681" (UID: "343b34aa-07cb-45f2-a070-b64466e0d681"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.073246 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/082348c7-f4ba-4369-a41e-d633a92ef9ec-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "082348c7-f4ba-4369-a41e-d633a92ef9ec" (UID: "082348c7-f4ba-4369-a41e-d633a92ef9ec"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.073556 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/082348c7-f4ba-4369-a41e-d633a92ef9ec-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.073572 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c607262e-1448-458e-9135-1581237f17e7-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.073582 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzsvj\" (UniqueName: \"kubernetes.io/projected/c607262e-1448-458e-9135-1581237f17e7-kube-api-access-qzsvj\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.073593 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/968aa77c-143b-4324-9736-6b9698cc2867-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.073602 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/343b34aa-07cb-45f2-a070-b64466e0d681-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.074884 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/082348c7-f4ba-4369-a41e-d633a92ef9ec-kube-api-access-2hlx6" (OuterVolumeSpecName: "kube-api-access-2hlx6") pod "082348c7-f4ba-4369-a41e-d633a92ef9ec" (UID: "082348c7-f4ba-4369-a41e-d633a92ef9ec"). InnerVolumeSpecName "kube-api-access-2hlx6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.075497 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/343b34aa-07cb-45f2-a070-b64466e0d681-kube-api-access-rxhk2" (OuterVolumeSpecName: "kube-api-access-rxhk2") pod "343b34aa-07cb-45f2-a070-b64466e0d681" (UID: "343b34aa-07cb-45f2-a070-b64466e0d681"). InnerVolumeSpecName "kube-api-access-rxhk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.075693 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/968aa77c-143b-4324-9736-6b9698cc2867-kube-api-access-n22hm" (OuterVolumeSpecName: "kube-api-access-n22hm") pod "968aa77c-143b-4324-9736-6b9698cc2867" (UID: "968aa77c-143b-4324-9736-6b9698cc2867"). InnerVolumeSpecName "kube-api-access-n22hm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.175182 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n22hm\" (UniqueName: \"kubernetes.io/projected/968aa77c-143b-4324-9736-6b9698cc2867-kube-api-access-n22hm\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.175236 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hlx6\" (UniqueName: \"kubernetes.io/projected/082348c7-f4ba-4369-a41e-d633a92ef9ec-kube-api-access-2hlx6\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.175251 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxhk2\" (UniqueName: \"kubernetes.io/projected/343b34aa-07cb-45f2-a070-b64466e0d681-kube-api-access-rxhk2\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.331793 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-nh95v" event={"ID":"343b34aa-07cb-45f2-a070-b64466e0d681","Type":"ContainerDied","Data":"f9e33c2e503ca681a942e146a4f97b2e8f001e3408a32a674f82af3604a7c4df"} Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.331834 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9e33c2e503ca681a942e146a4f97b2e8f001e3408a32a674f82af3604a7c4df" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.331843 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-nh95v" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.333477 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6e21-account-create-update-qrl4t" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.333490 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6e21-account-create-update-qrl4t" event={"ID":"082348c7-f4ba-4369-a41e-d633a92ef9ec","Type":"ContainerDied","Data":"a7668d47af6df8f829de0c9d0bb554a1fbe25df3ca5d20fb4eeb76d8aeebb16e"} Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.333512 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7668d47af6df8f829de0c9d0bb554a1fbe25df3ca5d20fb4eeb76d8aeebb16e" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.335040 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-d8v5p" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.335064 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-d8v5p" event={"ID":"c607262e-1448-458e-9135-1581237f17e7","Type":"ContainerDied","Data":"12d2bd7755622731652016be31ba548a4ad9f9ce0ff9799358baecf0ff3f84a3"} Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.335092 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12d2bd7755622731652016be31ba548a4ad9f9ce0ff9799358baecf0ff3f84a3" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.336419 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-beb3-account-create-update-ckz64" event={"ID":"968aa77c-143b-4324-9736-6b9698cc2867","Type":"ContainerDied","Data":"e0b2fa8529d7b96443417a171878d516616e845dda2c1e0c94c8412f27f3bccb"} Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.336439 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-beb3-account-create-update-ckz64" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.336453 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0b2fa8529d7b96443417a171878d516616e845dda2c1e0c94c8412f27f3bccb" Dec 10 15:37:46 crc kubenswrapper[4669]: I1210 15:37:46.546365 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.154115 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-nt2bl"] Dec 10 15:37:47 crc kubenswrapper[4669]: E1210 15:37:47.154510 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="968aa77c-143b-4324-9736-6b9698cc2867" containerName="mariadb-account-create-update" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.154526 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="968aa77c-143b-4324-9736-6b9698cc2867" containerName="mariadb-account-create-update" Dec 10 15:37:47 crc kubenswrapper[4669]: E1210 15:37:47.154540 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="082348c7-f4ba-4369-a41e-d633a92ef9ec" containerName="mariadb-account-create-update" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.154549 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="082348c7-f4ba-4369-a41e-d633a92ef9ec" containerName="mariadb-account-create-update" Dec 10 15:37:47 crc kubenswrapper[4669]: E1210 15:37:47.154568 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c607262e-1448-458e-9135-1581237f17e7" containerName="mariadb-database-create" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.154577 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="c607262e-1448-458e-9135-1581237f17e7" containerName="mariadb-database-create" Dec 10 15:37:47 crc kubenswrapper[4669]: E1210 15:37:47.154593 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="343b34aa-07cb-45f2-a070-b64466e0d681" containerName="mariadb-database-create" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.154602 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="343b34aa-07cb-45f2-a070-b64466e0d681" containerName="mariadb-database-create" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.154791 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="343b34aa-07cb-45f2-a070-b64466e0d681" containerName="mariadb-database-create" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.154808 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="c607262e-1448-458e-9135-1581237f17e7" containerName="mariadb-database-create" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.154829 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="082348c7-f4ba-4369-a41e-d633a92ef9ec" containerName="mariadb-account-create-update" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.154840 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="968aa77c-143b-4324-9736-6b9698cc2867" containerName="mariadb-account-create-update" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.155456 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-nt2bl" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.164550 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nt2bl"] Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.184372 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.238314 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-8j42d"] Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.298830 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp89f\" (UniqueName: \"kubernetes.io/projected/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-kube-api-access-tp89f\") pod \"glance-db-create-nt2bl\" (UID: \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\") " pod="openstack/glance-db-create-nt2bl" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.298900 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-operator-scripts\") pod \"glance-db-create-nt2bl\" (UID: \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\") " pod="openstack/glance-db-create-nt2bl" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.343459 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" podUID="c83e03df-8360-45d3-b6c7-66b70de295db" containerName="dnsmasq-dns" containerID="cri-o://7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432" gracePeriod=10 Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.400457 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-operator-scripts\") pod \"glance-db-create-nt2bl\" (UID: \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\") " pod="openstack/glance-db-create-nt2bl" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.400914 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp89f\" (UniqueName: \"kubernetes.io/projected/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-kube-api-access-tp89f\") pod \"glance-db-create-nt2bl\" (UID: \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\") " pod="openstack/glance-db-create-nt2bl" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.401305 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-operator-scripts\") pod \"glance-db-create-nt2bl\" (UID: \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\") " pod="openstack/glance-db-create-nt2bl" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.429600 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp89f\" (UniqueName: \"kubernetes.io/projected/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-kube-api-access-tp89f\") pod \"glance-db-create-nt2bl\" (UID: \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\") " pod="openstack/glance-db-create-nt2bl" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.471690 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-nt2bl" Dec 10 15:37:47 crc kubenswrapper[4669]: I1210 15:37:47.951033 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.013209 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-dns-svc\") pod \"c83e03df-8360-45d3-b6c7-66b70de295db\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.013338 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-ovsdbserver-nb\") pod \"c83e03df-8360-45d3-b6c7-66b70de295db\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.013366 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-config\") pod \"c83e03df-8360-45d3-b6c7-66b70de295db\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " Dec 10 15:37:48 crc kubenswrapper[4669]: W1210 15:37:48.059071 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4a6b5304_e7ea_4f27_b68e_20da85f0f6f0.slice/crio-0a6c2dee2b12f9d0a06137bb7d036c8c72cb9db229ccae8a9513f01f81d4af37 WatchSource:0}: Error finding container 0a6c2dee2b12f9d0a06137bb7d036c8c72cb9db229ccae8a9513f01f81d4af37: Status 404 returned error can't find the container with id 0a6c2dee2b12f9d0a06137bb7d036c8c72cb9db229ccae8a9513f01f81d4af37 Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.059570 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-nt2bl"] Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.081006 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-config" (OuterVolumeSpecName: "config") pod "c83e03df-8360-45d3-b6c7-66b70de295db" (UID: "c83e03df-8360-45d3-b6c7-66b70de295db"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.113563 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c83e03df-8360-45d3-b6c7-66b70de295db" (UID: "c83e03df-8360-45d3-b6c7-66b70de295db"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.114523 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q994p\" (UniqueName: \"kubernetes.io/projected/c83e03df-8360-45d3-b6c7-66b70de295db-kube-api-access-q994p\") pod \"c83e03df-8360-45d3-b6c7-66b70de295db\" (UID: \"c83e03df-8360-45d3-b6c7-66b70de295db\") " Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.114837 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.114868 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.116616 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c83e03df-8360-45d3-b6c7-66b70de295db" (UID: "c83e03df-8360-45d3-b6c7-66b70de295db"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.118120 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c83e03df-8360-45d3-b6c7-66b70de295db-kube-api-access-q994p" (OuterVolumeSpecName: "kube-api-access-q994p") pod "c83e03df-8360-45d3-b6c7-66b70de295db" (UID: "c83e03df-8360-45d3-b6c7-66b70de295db"). InnerVolumeSpecName "kube-api-access-q994p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.216264 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c83e03df-8360-45d3-b6c7-66b70de295db-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.216302 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q994p\" (UniqueName: \"kubernetes.io/projected/c83e03df-8360-45d3-b6c7-66b70de295db-kube-api-access-q994p\") on node \"crc\" DevicePath \"\"" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.351001 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nt2bl" event={"ID":"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0","Type":"ContainerStarted","Data":"7441f745a917595047737e61341236a737ce951642885aa415102c63fce431c1"} Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.351043 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nt2bl" event={"ID":"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0","Type":"ContainerStarted","Data":"0a6c2dee2b12f9d0a06137bb7d036c8c72cb9db229ccae8a9513f01f81d4af37"} Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.352788 4669 generic.go:334] "Generic (PLEG): container finished" podID="c83e03df-8360-45d3-b6c7-66b70de295db" containerID="7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432" exitCode=0 Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.352831 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" event={"ID":"c83e03df-8360-45d3-b6c7-66b70de295db","Type":"ContainerDied","Data":"7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432"} Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.352859 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d" event={"ID":"c83e03df-8360-45d3-b6c7-66b70de295db","Type":"ContainerDied","Data":"8f383cebf60fd70821463d37819fa6ea5588a2c41d7809392a7ddcaafd509cec"} Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.352877 4669 scope.go:117] "RemoveContainer" containerID="7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432" Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.352908 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-8j42d"
Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.368709 4669 scope.go:117] "RemoveContainer" containerID="befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee"
Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.371693 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-nt2bl" podStartSLOduration=1.371676258 podStartE2EDuration="1.371676258s" podCreationTimestamp="2025-12-10 15:37:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:37:48.365196929 +0000 UTC m=+1042.282143556" watchObservedRunningTime="2025-12-10 15:37:48.371676258 +0000 UTC m=+1042.288622885"
Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.394964 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-8j42d"]
Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.399043 4669 scope.go:117] "RemoveContainer" containerID="7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432"
Dec 10 15:37:48 crc kubenswrapper[4669]: E1210 15:37:48.399987 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432\": container with ID starting with 7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432 not found: ID does not exist" containerID="7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432"
Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.400020 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432"} err="failed to get container status \"7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432\": rpc error: code = NotFound desc = could not find container \"7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432\": container with ID starting with 7bd5161969f996fb44f5a4599b05b0ac96bb6bed419a4527d2d5a95086c68432 not found: ID does not exist"
Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.400044 4669 scope.go:117] "RemoveContainer" containerID="befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee"
Dec 10 15:37:48 crc kubenswrapper[4669]: E1210 15:37:48.400631 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee\": container with ID starting with befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee not found: ID does not exist" containerID="befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee"
Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.400648 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee"} err="failed to get container status \"befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee\": rpc error: code = NotFound desc = could not find container \"befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee\": container with ID starting with befc2fdb4efe00a8ffce497d3c12554823dfe1a8b1313b6a6935c308d974f4ee not found: ID does not exist"
Dec 10 15:37:48 crc kubenswrapper[4669]: I1210 15:37:48.412237 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-8j42d"]
Dec 10 15:37:49 crc kubenswrapper[4669]: I1210 15:37:49.361578 4669 generic.go:334] "Generic (PLEG): container finished" podID="4a6b5304-e7ea-4f27-b68e-20da85f0f6f0" containerID="7441f745a917595047737e61341236a737ce951642885aa415102c63fce431c1" exitCode=0
Dec 10 15:37:49 crc kubenswrapper[4669]: I1210 15:37:49.361683 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nt2bl" event={"ID":"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0","Type":"ContainerDied","Data":"7441f745a917595047737e61341236a737ce951642885aa415102c63fce431c1"}
Dec 10 15:37:50 crc kubenswrapper[4669]: I1210 15:37:50.418652 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c83e03df-8360-45d3-b6c7-66b70de295db" path="/var/lib/kubelet/pods/c83e03df-8360-45d3-b6c7-66b70de295db/volumes"
Dec 10 15:37:50 crc kubenswrapper[4669]: I1210 15:37:50.706259 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nt2bl"
Dec 10 15:37:50 crc kubenswrapper[4669]: I1210 15:37:50.855284 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tp89f\" (UniqueName: \"kubernetes.io/projected/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-kube-api-access-tp89f\") pod \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\" (UID: \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\") "
Dec 10 15:37:50 crc kubenswrapper[4669]: I1210 15:37:50.855576 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-operator-scripts\") pod \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\" (UID: \"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0\") "
Dec 10 15:37:50 crc kubenswrapper[4669]: I1210 15:37:50.856032 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a6b5304-e7ea-4f27-b68e-20da85f0f6f0" (UID: "4a6b5304-e7ea-4f27-b68e-20da85f0f6f0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:37:50 crc kubenswrapper[4669]: I1210 15:37:50.868664 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-kube-api-access-tp89f" (OuterVolumeSpecName: "kube-api-access-tp89f") pod "4a6b5304-e7ea-4f27-b68e-20da85f0f6f0" (UID: "4a6b5304-e7ea-4f27-b68e-20da85f0f6f0"). InnerVolumeSpecName "kube-api-access-tp89f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:37:50 crc kubenswrapper[4669]: I1210 15:37:50.957351 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tp89f\" (UniqueName: \"kubernetes.io/projected/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-kube-api-access-tp89f\") on node \"crc\" DevicePath \"\""
Dec 10 15:37:50 crc kubenswrapper[4669]: I1210 15:37:50.957380 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 15:37:51 crc kubenswrapper[4669]: I1210 15:37:51.384963 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-nt2bl" event={"ID":"4a6b5304-e7ea-4f27-b68e-20da85f0f6f0","Type":"ContainerDied","Data":"0a6c2dee2b12f9d0a06137bb7d036c8c72cb9db229ccae8a9513f01f81d4af37"}
Dec 10 15:37:51 crc kubenswrapper[4669]: I1210 15:37:51.385009 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a6c2dee2b12f9d0a06137bb7d036c8c72cb9db229ccae8a9513f01f81d4af37"
Dec 10 15:37:51 crc kubenswrapper[4669]: I1210 15:37:51.385059 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-nt2bl"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.719986 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-d8jx6"]
Dec 10 15:37:52 crc kubenswrapper[4669]: E1210 15:37:52.721692 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c83e03df-8360-45d3-b6c7-66b70de295db" containerName="init"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.721982 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="c83e03df-8360-45d3-b6c7-66b70de295db" containerName="init"
Dec 10 15:37:52 crc kubenswrapper[4669]: E1210 15:37:52.722090 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a6b5304-e7ea-4f27-b68e-20da85f0f6f0" containerName="mariadb-database-create"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.722161 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a6b5304-e7ea-4f27-b68e-20da85f0f6f0" containerName="mariadb-database-create"
Dec 10 15:37:52 crc kubenswrapper[4669]: E1210 15:37:52.722279 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c83e03df-8360-45d3-b6c7-66b70de295db" containerName="dnsmasq-dns"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.722338 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="c83e03df-8360-45d3-b6c7-66b70de295db" containerName="dnsmasq-dns"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.722621 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="c83e03df-8360-45d3-b6c7-66b70de295db" containerName="dnsmasq-dns"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.722751 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a6b5304-e7ea-4f27-b68e-20da85f0f6f0" containerName="mariadb-database-create"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.723562 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.725787 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-dzg4j"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.727170 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.742329 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-d8jx6"]
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.787872 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-combined-ca-bundle\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.787965 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-db-sync-config-data\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.788002 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2jn7t\" (UniqueName: \"kubernetes.io/projected/2f0228ea-8dff-4494-925c-db481a3235e8-kube-api-access-2jn7t\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.788161 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-config-data\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.888648 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-config-data\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.888724 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-combined-ca-bundle\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.888768 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-db-sync-config-data\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.888800 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2jn7t\" (UniqueName: \"kubernetes.io/projected/2f0228ea-8dff-4494-925c-db481a3235e8-kube-api-access-2jn7t\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.899016 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-db-sync-config-data\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.907407 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-combined-ca-bundle\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.907778 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-config-data\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:52 crc kubenswrapper[4669]: I1210 15:37:52.908121 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2jn7t\" (UniqueName: \"kubernetes.io/projected/2f0228ea-8dff-4494-925c-db481a3235e8-kube-api-access-2jn7t\") pod \"glance-db-sync-d8jx6\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") " pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:53 crc kubenswrapper[4669]: I1210 15:37:53.006766 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0"
Dec 10 15:37:53 crc kubenswrapper[4669]: I1210 15:37:53.056081 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:37:53 crc kubenswrapper[4669]: I1210 15:37:53.736833 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-d8jx6"]
Dec 10 15:37:54 crc kubenswrapper[4669]: I1210 15:37:54.416999 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d8jx6" event={"ID":"2f0228ea-8dff-4494-925c-db481a3235e8","Type":"ContainerStarted","Data":"f33c0e340cd8bae8c91c12915cde3c53805d5ddba3da2512c06073e24dec1008"}
Dec 10 15:37:54 crc kubenswrapper[4669]: I1210 15:37:54.419916 4669 generic.go:334] "Generic (PLEG): container finished" podID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" containerID="f1101581d3a1607b98d6dbff73d1eae1d9b3c53a86cade61351b25b3c4420773" exitCode=0
Dec 10 15:37:54 crc kubenswrapper[4669]: I1210 15:37:54.419979 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7714ed30-3730-4a63-8d4d-2b7e097cadbc","Type":"ContainerDied","Data":"f1101581d3a1607b98d6dbff73d1eae1d9b3c53a86cade61351b25b3c4420773"}
Dec 10 15:37:55 crc kubenswrapper[4669]: I1210 15:37:55.429897 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7714ed30-3730-4a63-8d4d-2b7e097cadbc","Type":"ContainerStarted","Data":"a78364c470182cf19c6d95b01f8816089a6bc59167765da5cec24e56ca46dcb6"}
Dec 10 15:37:55 crc kubenswrapper[4669]: I1210 15:37:55.431077 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0"
Dec 10 15:37:55 crc kubenswrapper[4669]: I1210 15:37:55.463211 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=39.403116291 podStartE2EDuration="1m8.463188236s" podCreationTimestamp="2025-12-10 15:36:47 +0000 UTC" firstStartedPulling="2025-12-10 15:36:49.245566097 +0000 UTC m=+983.162512724" lastFinishedPulling="2025-12-10 15:37:18.305638032 +0000 UTC m=+1012.222584669" observedRunningTime="2025-12-10 15:37:55.455386864 +0000 UTC m=+1049.372333491" watchObservedRunningTime="2025-12-10 15:37:55.463188236 +0000 UTC m=+1049.380134863"
Dec 10 15:37:58 crc kubenswrapper[4669]: I1210 15:37:58.277909 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-j9nmw" podUID="f79f439d-6ac0-4ebc-8ac8-1023ec207254" containerName="ovn-controller" probeResult="failure" output=<
Dec 10 15:37:58 crc kubenswrapper[4669]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Dec 10 15:37:58 crc kubenswrapper[4669]: >
Dec 10 15:37:58 crc kubenswrapper[4669]: I1210 15:37:58.530030 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bbqvq"
Dec 10 15:37:59 crc kubenswrapper[4669]: I1210 15:37:59.467629 4669 generic.go:334] "Generic (PLEG): container finished" podID="b359c954-51b4-401c-a783-f0220d650a4b" containerID="f70abc287368477df634eb5a5310c6298fb195497f7e904832ecee389e85ffea" exitCode=0
Dec 10 15:37:59 crc kubenswrapper[4669]: I1210 15:37:59.467726 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b359c954-51b4-401c-a783-f0220d650a4b","Type":"ContainerDied","Data":"f70abc287368477df634eb5a5310c6298fb195497f7e904832ecee389e85ffea"}
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.274583 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-j9nmw" podUID="f79f439d-6ac0-4ebc-8ac8-1023ec207254" containerName="ovn-controller" probeResult="failure" output=<
Dec 10 15:38:03 crc kubenswrapper[4669]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Dec 10 15:38:03 crc kubenswrapper[4669]: >
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.484633 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-bbqvq"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.688435 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-j9nmw-config-zmr7k"]
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.689791 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.695421 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j9nmw-config-zmr7k"]
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.695959 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.796177 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run-ovn\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.796336 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhhwt\" (UniqueName: \"kubernetes.io/projected/1eb381ea-587f-4e08-ad04-92c2dda07430-kube-api-access-fhhwt\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.796371 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-scripts\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.796438 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.796489 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-additional-scripts\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.796554 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-log-ovn\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.898995 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-scripts\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.899258 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.899448 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-additional-scripts\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.899566 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-log-ovn\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.899671 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.899723 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run-ovn\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.899946 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-log-ovn\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.899998 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run-ovn\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.903161 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-additional-scripts\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.903433 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhhwt\" (UniqueName: \"kubernetes.io/projected/1eb381ea-587f-4e08-ad04-92c2dda07430-kube-api-access-fhhwt\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.903839 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-scripts\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:03 crc kubenswrapper[4669]: I1210 15:38:03.923745 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhhwt\" (UniqueName: \"kubernetes.io/projected/1eb381ea-587f-4e08-ad04-92c2dda07430-kube-api-access-fhhwt\") pod \"ovn-controller-j9nmw-config-zmr7k\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") " pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:04 crc kubenswrapper[4669]: I1210 15:38:04.047898 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:07 crc kubenswrapper[4669]: I1210 15:38:07.565572 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b359c954-51b4-401c-a783-f0220d650a4b","Type":"ContainerStarted","Data":"7827653ca7ec12642be72fd58ebf8511ed5d0a419bc8cc09d930e860a83e9513"}
Dec 10 15:38:07 crc kubenswrapper[4669]: I1210 15:38:07.567168 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Dec 10 15:38:07 crc kubenswrapper[4669]: I1210 15:38:07.592866 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371955.261932 podStartE2EDuration="1m21.59284268s" podCreationTimestamp="2025-12-10 15:36:46 +0000 UTC" firstStartedPulling="2025-12-10 15:36:48.818528519 +0000 UTC m=+982.735475146" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:38:07.584808272 +0000 UTC m=+1061.501754909" watchObservedRunningTime="2025-12-10 15:38:07.59284268 +0000 UTC m=+1061.509789317"
Dec 10 15:38:07 crc kubenswrapper[4669]: I1210 15:38:07.623445 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j9nmw-config-zmr7k"]
Dec 10 15:38:07 crc kubenswrapper[4669]: W1210 15:38:07.632326 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1eb381ea_587f_4e08_ad04_92c2dda07430.slice/crio-c7fb2735f30daa7dfb81f6f697f96f76688c72812d9cf5ef8734b199bb041421 WatchSource:0}: Error finding container c7fb2735f30daa7dfb81f6f697f96f76688c72812d9cf5ef8734b199bb041421: Status 404 returned error can't find the container with id c7fb2735f30daa7dfb81f6f697f96f76688c72812d9cf5ef8734b199bb041421
Dec 10 15:38:08 crc kubenswrapper[4669]: I1210 15:38:08.277821 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-j9nmw"
Dec 10 15:38:08 crc kubenswrapper[4669]: I1210 15:38:08.575576 4669 generic.go:334] "Generic (PLEG): container finished" podID="1eb381ea-587f-4e08-ad04-92c2dda07430" containerID="5aace8ea46b46968de06d3d4d40e3c80a71c4a88d9aa87a10a380ec4ee021f02" exitCode=0
Dec 10 15:38:08 crc kubenswrapper[4669]: I1210 15:38:08.575633 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j9nmw-config-zmr7k" event={"ID":"1eb381ea-587f-4e08-ad04-92c2dda07430","Type":"ContainerDied","Data":"5aace8ea46b46968de06d3d4d40e3c80a71c4a88d9aa87a10a380ec4ee021f02"}
Dec 10 15:38:08 crc kubenswrapper[4669]: I1210 15:38:08.575659 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j9nmw-config-zmr7k" event={"ID":"1eb381ea-587f-4e08-ad04-92c2dda07430","Type":"ContainerStarted","Data":"c7fb2735f30daa7dfb81f6f697f96f76688c72812d9cf5ef8734b199bb041421"}
Dec 10 15:38:08 crc kubenswrapper[4669]: I1210 15:38:08.577175 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d8jx6" event={"ID":"2f0228ea-8dff-4494-925c-db481a3235e8","Type":"ContainerStarted","Data":"d62249d2b6aa40b34211d9709ebe8bfc27d361e819cb2f1aafc654b80a9f1ff2"}
Dec 10 15:38:08 crc kubenswrapper[4669]: I1210 15:38:08.617157 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-d8jx6" podStartSLOduration=3.05273319 podStartE2EDuration="16.617137257s" podCreationTimestamp="2025-12-10 15:37:52 +0000 UTC" firstStartedPulling="2025-12-10 15:37:53.739708232 +0000 UTC m=+1047.656654859" lastFinishedPulling="2025-12-10 15:38:07.304112279 +0000 UTC m=+1061.221058926" observedRunningTime="2025-12-10 15:38:08.614768288 +0000 UTC m=+1062.531714935" watchObservedRunningTime="2025-12-10 15:38:08.617137257 +0000 UTC m=+1062.534083884"
Dec 10 15:38:08 crc kubenswrapper[4669]: I1210 15:38:08.655445 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 10 15:38:09 crc kubenswrapper[4669]: I1210 15:38:09.924353 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.019312 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-scripts\") pod \"1eb381ea-587f-4e08-ad04-92c2dda07430\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") "
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.019553 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run-ovn\") pod \"1eb381ea-587f-4e08-ad04-92c2dda07430\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") "
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.019572 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run\") pod \"1eb381ea-587f-4e08-ad04-92c2dda07430\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") "
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.019652 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-log-ovn\") pod \"1eb381ea-587f-4e08-ad04-92c2dda07430\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") "
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.019681 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-additional-scripts\") pod \"1eb381ea-587f-4e08-ad04-92c2dda07430\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") "
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.019702 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhhwt\" (UniqueName: \"kubernetes.io/projected/1eb381ea-587f-4e08-ad04-92c2dda07430-kube-api-access-fhhwt\") pod \"1eb381ea-587f-4e08-ad04-92c2dda07430\" (UID: \"1eb381ea-587f-4e08-ad04-92c2dda07430\") "
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.020357 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run" (OuterVolumeSpecName: "var-run") pod "1eb381ea-587f-4e08-ad04-92c2dda07430" (UID: "1eb381ea-587f-4e08-ad04-92c2dda07430"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.020379 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "1eb381ea-587f-4e08-ad04-92c2dda07430" (UID: "1eb381ea-587f-4e08-ad04-92c2dda07430"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.020421 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "1eb381ea-587f-4e08-ad04-92c2dda07430" (UID: "1eb381ea-587f-4e08-ad04-92c2dda07430"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.020440 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-scripts" (OuterVolumeSpecName: "scripts") pod "1eb381ea-587f-4e08-ad04-92c2dda07430" (UID: "1eb381ea-587f-4e08-ad04-92c2dda07430"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.020739 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "1eb381ea-587f-4e08-ad04-92c2dda07430" (UID: "1eb381ea-587f-4e08-ad04-92c2dda07430"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.025252 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1eb381ea-587f-4e08-ad04-92c2dda07430-kube-api-access-fhhwt" (OuterVolumeSpecName: "kube-api-access-fhhwt") pod "1eb381ea-587f-4e08-ad04-92c2dda07430" (UID: "1eb381ea-587f-4e08-ad04-92c2dda07430"). InnerVolumeSpecName "kube-api-access-fhhwt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.121844 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.121877 4669 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.121887 4669 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-run\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.121896 4669 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1eb381ea-587f-4e08-ad04-92c2dda07430-var-log-ovn\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.121908 4669 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1eb381ea-587f-4e08-ad04-92c2dda07430-additional-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.121919 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhhwt\" (UniqueName: \"kubernetes.io/projected/1eb381ea-587f-4e08-ad04-92c2dda07430-kube-api-access-fhhwt\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.592336 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j9nmw-config-zmr7k" event={"ID":"1eb381ea-587f-4e08-ad04-92c2dda07430","Type":"ContainerDied","Data":"c7fb2735f30daa7dfb81f6f697f96f76688c72812d9cf5ef8734b199bb041421"}
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.592390 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c7fb2735f30daa7dfb81f6f697f96f76688c72812d9cf5ef8734b199bb041421"
Dec 10 15:38:10 crc kubenswrapper[4669]: I1210 15:38:10.592671 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw-config-zmr7k"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.067300 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-j9nmw-config-zmr7k"]
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.075149 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-j9nmw-config-zmr7k"]
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.167060 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-j9nmw-config-654sw"]
Dec 10 15:38:11 crc kubenswrapper[4669]: E1210 15:38:11.167475 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1eb381ea-587f-4e08-ad04-92c2dda07430" containerName="ovn-config"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.167500 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1eb381ea-587f-4e08-ad04-92c2dda07430" containerName="ovn-config"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.167698 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="1eb381ea-587f-4e08-ad04-92c2dda07430" containerName="ovn-config"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.168344 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.179675 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.186745 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j9nmw-config-654sw"]
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.240738 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.240782 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-scripts\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.240811 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-additional-scripts\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.241064 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-log-ovn\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.241132 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmgd4\" (UniqueName: \"kubernetes.io/projected/e0a5188d-71e0-4f22-8506-2f9cd65436bd-kube-api-access-pmgd4\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.241200 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run-ovn\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.342299 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-log-ovn\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.342604 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmgd4\" (UniqueName: \"kubernetes.io/projected/e0a5188d-71e0-4f22-8506-2f9cd65436bd-kube-api-access-pmgd4\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.342694 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-log-ovn\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.342859 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run-ovn\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.342975 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run-ovn\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.343112 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.343239 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-scripts\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.343369 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-additional-scripts\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.343249 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.344012 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-additional-scripts\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.345358 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-scripts\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.371395 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmgd4\" (UniqueName: \"kubernetes.io/projected/e0a5188d-71e0-4f22-8506-2f9cd65436bd-kube-api-access-pmgd4\") pod \"ovn-controller-j9nmw-config-654sw\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") " pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.482999 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:11 crc kubenswrapper[4669]: I1210 15:38:11.960648 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-j9nmw-config-654sw"]
Dec 10 15:38:12 crc kubenswrapper[4669]: I1210 15:38:12.407281 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1eb381ea-587f-4e08-ad04-92c2dda07430" path="/var/lib/kubelet/pods/1eb381ea-587f-4e08-ad04-92c2dda07430/volumes"
Dec 10 15:38:12 crc kubenswrapper[4669]: I1210 15:38:12.632034 4669 generic.go:334] "Generic (PLEG): container finished" podID="e0a5188d-71e0-4f22-8506-2f9cd65436bd" containerID="e138e24d0576bed82ef59cec3da33bc58610b3784ab3ebf06245daf302b1ff0d" exitCode=0
Dec 10 15:38:12 crc kubenswrapper[4669]: I1210 15:38:12.632390 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j9nmw-config-654sw" event={"ID":"e0a5188d-71e0-4f22-8506-2f9cd65436bd","Type":"ContainerDied","Data":"e138e24d0576bed82ef59cec3da33bc58610b3784ab3ebf06245daf302b1ff0d"}
Dec 10 15:38:12 crc kubenswrapper[4669]: I1210 15:38:12.632749 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j9nmw-config-654sw" event={"ID":"e0a5188d-71e0-4f22-8506-2f9cd65436bd","Type":"ContainerStarted","Data":"2b7504b98bb4711460db6928f35b83b821fcec2ab4e08c20fc524572fe2502e6"}
Dec 10 15:38:13 crc kubenswrapper[4669]: I1210 15:38:13.989021 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.088242 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run\") pod \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") "
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.088413 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-scripts\") pod \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") "
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.088469 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmgd4\" (UniqueName: \"kubernetes.io/projected/e0a5188d-71e0-4f22-8506-2f9cd65436bd-kube-api-access-pmgd4\") pod \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") "
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.088462 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run" (OuterVolumeSpecName: "var-run") pod "e0a5188d-71e0-4f22-8506-2f9cd65436bd" (UID: "e0a5188d-71e0-4f22-8506-2f9cd65436bd"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.088492 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run-ovn\") pod \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") "
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.088543 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "e0a5188d-71e0-4f22-8506-2f9cd65436bd" (UID: "e0a5188d-71e0-4f22-8506-2f9cd65436bd"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.088682 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-log-ovn\") pod \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") "
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.088812 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-additional-scripts\") pod \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\" (UID: \"e0a5188d-71e0-4f22-8506-2f9cd65436bd\") "
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.089583 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "e0a5188d-71e0-4f22-8506-2f9cd65436bd" (UID: "e0a5188d-71e0-4f22-8506-2f9cd65436bd"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.089721 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-scripts" (OuterVolumeSpecName: "scripts") pod "e0a5188d-71e0-4f22-8506-2f9cd65436bd" (UID: "e0a5188d-71e0-4f22-8506-2f9cd65436bd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.089740 4669 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-log-ovn\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.089767 4669 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.089785 4669 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/e0a5188d-71e0-4f22-8506-2f9cd65436bd-var-run-ovn\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.090081 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "e0a5188d-71e0-4f22-8506-2f9cd65436bd" (UID: "e0a5188d-71e0-4f22-8506-2f9cd65436bd"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.109430 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e0a5188d-71e0-4f22-8506-2f9cd65436bd-kube-api-access-pmgd4" (OuterVolumeSpecName: "kube-api-access-pmgd4") pod "e0a5188d-71e0-4f22-8506-2f9cd65436bd" (UID: "e0a5188d-71e0-4f22-8506-2f9cd65436bd"). InnerVolumeSpecName "kube-api-access-pmgd4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.192067 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.192115 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmgd4\" (UniqueName: \"kubernetes.io/projected/e0a5188d-71e0-4f22-8506-2f9cd65436bd-kube-api-access-pmgd4\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.192126 4669 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/e0a5188d-71e0-4f22-8506-2f9cd65436bd-additional-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.656795 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-j9nmw-config-654sw" event={"ID":"e0a5188d-71e0-4f22-8506-2f9cd65436bd","Type":"ContainerDied","Data":"2b7504b98bb4711460db6928f35b83b821fcec2ab4e08c20fc524572fe2502e6"}
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.656833 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2b7504b98bb4711460db6928f35b83b821fcec2ab4e08c20fc524572fe2502e6"
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.656845 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-j9nmw-config-654sw"
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.658176 4669 generic.go:334] "Generic (PLEG): container finished" podID="2f0228ea-8dff-4494-925c-db481a3235e8" containerID="d62249d2b6aa40b34211d9709ebe8bfc27d361e819cb2f1aafc654b80a9f1ff2" exitCode=0
Dec 10 15:38:14 crc kubenswrapper[4669]: I1210 15:38:14.658266 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d8jx6" event={"ID":"2f0228ea-8dff-4494-925c-db481a3235e8","Type":"ContainerDied","Data":"d62249d2b6aa40b34211d9709ebe8bfc27d361e819cb2f1aafc654b80a9f1ff2"}
Dec 10 15:38:15 crc kubenswrapper[4669]: I1210 15:38:15.068519 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-j9nmw-config-654sw"]
Dec 10 15:38:15 crc kubenswrapper[4669]: I1210 15:38:15.075432 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-j9nmw-config-654sw"]
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.138599 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.229647 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-combined-ca-bundle\") pod \"2f0228ea-8dff-4494-925c-db481a3235e8\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") "
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.229707 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-db-sync-config-data\") pod \"2f0228ea-8dff-4494-925c-db481a3235e8\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") "
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.229764 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2jn7t\" (UniqueName: \"kubernetes.io/projected/2f0228ea-8dff-4494-925c-db481a3235e8-kube-api-access-2jn7t\") pod \"2f0228ea-8dff-4494-925c-db481a3235e8\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") "
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.229785 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-config-data\") pod \"2f0228ea-8dff-4494-925c-db481a3235e8\" (UID: \"2f0228ea-8dff-4494-925c-db481a3235e8\") "
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.235563 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "2f0228ea-8dff-4494-925c-db481a3235e8" (UID: "2f0228ea-8dff-4494-925c-db481a3235e8"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.236295 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f0228ea-8dff-4494-925c-db481a3235e8-kube-api-access-2jn7t" (OuterVolumeSpecName: "kube-api-access-2jn7t") pod "2f0228ea-8dff-4494-925c-db481a3235e8" (UID: "2f0228ea-8dff-4494-925c-db481a3235e8"). InnerVolumeSpecName "kube-api-access-2jn7t". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.255611 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2f0228ea-8dff-4494-925c-db481a3235e8" (UID: "2f0228ea-8dff-4494-925c-db481a3235e8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.269859 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-config-data" (OuterVolumeSpecName: "config-data") pod "2f0228ea-8dff-4494-925c-db481a3235e8" (UID: "2f0228ea-8dff-4494-925c-db481a3235e8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.331924 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.331962 4669 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.331971 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2jn7t\" (UniqueName: \"kubernetes.io/projected/2f0228ea-8dff-4494-925c-db481a3235e8-kube-api-access-2jn7t\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.331981 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2f0228ea-8dff-4494-925c-db481a3235e8-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.407275 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e0a5188d-71e0-4f22-8506-2f9cd65436bd" path="/var/lib/kubelet/pods/e0a5188d-71e0-4f22-8506-2f9cd65436bd/volumes"
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.679952 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-d8jx6" event={"ID":"2f0228ea-8dff-4494-925c-db481a3235e8","Type":"ContainerDied","Data":"f33c0e340cd8bae8c91c12915cde3c53805d5ddba3da2512c06073e24dec1008"}
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.679991 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f33c0e340cd8bae8c91c12915cde3c53805d5ddba3da2512c06073e24dec1008"
Dec 10 15:38:16 crc kubenswrapper[4669]: I1210 15:38:16.680068 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-d8jx6"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.222415 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-2xd8l"]
Dec 10 15:38:17 crc kubenswrapper[4669]: E1210 15:38:17.222752 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e0a5188d-71e0-4f22-8506-2f9cd65436bd" containerName="ovn-config"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.222763 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e0a5188d-71e0-4f22-8506-2f9cd65436bd" containerName="ovn-config"
Dec 10 15:38:17 crc kubenswrapper[4669]: E1210 15:38:17.222776 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f0228ea-8dff-4494-925c-db481a3235e8" containerName="glance-db-sync"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.222782 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f0228ea-8dff-4494-925c-db481a3235e8" containerName="glance-db-sync"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.222919 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f0228ea-8dff-4494-925c-db481a3235e8" containerName="glance-db-sync"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.222935 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e0a5188d-71e0-4f22-8506-2f9cd65436bd" containerName="ovn-config"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.223691 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.246031 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-config\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.246096 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-dns-svc\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.246191 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.246252 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68lsm\" (UniqueName: \"kubernetes.io/projected/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-kube-api-access-68lsm\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.246359 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.296323 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-2xd8l"]
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.347445 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.347671 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68lsm\" (UniqueName: \"kubernetes.io/projected/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-kube-api-access-68lsm\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.347800 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.347869 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-config\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.347974 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-dns-svc\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.348933 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-sb\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.349641 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-nb\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.349896 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-config\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.350284 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-dns-svc\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.365365 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-68lsm\" (UniqueName: \"kubernetes.io/projected/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-kube-api-access-68lsm\") pod \"dnsmasq-dns-554567b4f7-2xd8l\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:17 crc kubenswrapper[4669]: I1210 15:38:17.551796 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l"
Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.020553 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-2xd8l"]
Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.236546 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0"
Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.704771 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-2xgm4"]
Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.706236 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.714864 4669 generic.go:334] "Generic (PLEG): container finished" podID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerID="51fc5d636518913c3a6205c5a1fa9b8f2a00c1d19de43a6f85c4d275140c823d" exitCode=0 Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.714911 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" event={"ID":"8c5beee8-d55f-4b9a-bf26-1207a5a6264f","Type":"ContainerDied","Data":"51fc5d636518913c3a6205c5a1fa9b8f2a00c1d19de43a6f85c4d275140c823d"} Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.714940 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" event={"ID":"8c5beee8-d55f-4b9a-bf26-1207a5a6264f","Type":"ContainerStarted","Data":"1e54eae227f0d85da5cc32a6bd4d6608bcb9e9dc491f34004b0b2eb4f811ea85"} Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.736601 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-2xgm4"] Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.771894 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55391db7-2483-46c6-9d71-0915d81eb5ee-operator-scripts\") pod \"cinder-db-create-2xgm4\" (UID: \"55391db7-2483-46c6-9d71-0915d81eb5ee\") " pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.772086 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zzqc5\" (UniqueName: \"kubernetes.io/projected/55391db7-2483-46c6-9d71-0915d81eb5ee-kube-api-access-zzqc5\") pod \"cinder-db-create-2xgm4\" (UID: \"55391db7-2483-46c6-9d71-0915d81eb5ee\") " pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.874536 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zzqc5\" (UniqueName: \"kubernetes.io/projected/55391db7-2483-46c6-9d71-0915d81eb5ee-kube-api-access-zzqc5\") pod \"cinder-db-create-2xgm4\" (UID: \"55391db7-2483-46c6-9d71-0915d81eb5ee\") " pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.874637 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55391db7-2483-46c6-9d71-0915d81eb5ee-operator-scripts\") pod \"cinder-db-create-2xgm4\" (UID: \"55391db7-2483-46c6-9d71-0915d81eb5ee\") " pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.875302 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55391db7-2483-46c6-9d71-0915d81eb5ee-operator-scripts\") pod \"cinder-db-create-2xgm4\" (UID: \"55391db7-2483-46c6-9d71-0915d81eb5ee\") " pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:18 crc kubenswrapper[4669]: I1210 15:38:18.917800 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zzqc5\" (UniqueName: \"kubernetes.io/projected/55391db7-2483-46c6-9d71-0915d81eb5ee-kube-api-access-zzqc5\") pod \"cinder-db-create-2xgm4\" (UID: \"55391db7-2483-46c6-9d71-0915d81eb5ee\") " pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.019165 4669 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack/barbican-92c4-account-create-update-dchgr"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.020123 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.024718 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.035799 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-92c4-account-create-update-dchgr"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.076745 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khqkx\" (UniqueName: \"kubernetes.io/projected/4ee39beb-9369-4750-8c57-2afa1e96029e-kube-api-access-khqkx\") pod \"barbican-92c4-account-create-update-dchgr\" (UID: \"4ee39beb-9369-4750-8c57-2afa1e96029e\") " pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.076850 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ee39beb-9369-4750-8c57-2afa1e96029e-operator-scripts\") pod \"barbican-92c4-account-create-update-dchgr\" (UID: \"4ee39beb-9369-4750-8c57-2afa1e96029e\") " pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.098961 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-s4fqw"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.108541 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.116245 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.155195 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-6e58-account-create-update-kp9vk"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.156434 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.161624 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.177694 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5mh8\" (UniqueName: \"kubernetes.io/projected/639a743d-6218-4ed2-82b1-e898c97906e8-kube-api-access-v5mh8\") pod \"cinder-6e58-account-create-update-kp9vk\" (UID: \"639a743d-6218-4ed2-82b1-e898c97906e8\") " pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.177748 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/639a743d-6218-4ed2-82b1-e898c97906e8-operator-scripts\") pod \"cinder-6e58-account-create-update-kp9vk\" (UID: \"639a743d-6218-4ed2-82b1-e898c97906e8\") " pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.177780 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khqkx\" (UniqueName: \"kubernetes.io/projected/4ee39beb-9369-4750-8c57-2afa1e96029e-kube-api-access-khqkx\") pod \"barbican-92c4-account-create-update-dchgr\" (UID: \"4ee39beb-9369-4750-8c57-2afa1e96029e\") " pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.177856 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5f2fd35-f21f-41d2-a254-3460f66af1f3-operator-scripts\") pod \"barbican-db-create-s4fqw\" (UID: \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\") " pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.177893 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k29wz\" (UniqueName: \"kubernetes.io/projected/f5f2fd35-f21f-41d2-a254-3460f66af1f3-kube-api-access-k29wz\") pod \"barbican-db-create-s4fqw\" (UID: \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\") " pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.177964 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ee39beb-9369-4750-8c57-2afa1e96029e-operator-scripts\") pod \"barbican-92c4-account-create-update-dchgr\" (UID: \"4ee39beb-9369-4750-8c57-2afa1e96029e\") " pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.184325 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ee39beb-9369-4750-8c57-2afa1e96029e-operator-scripts\") pod \"barbican-92c4-account-create-update-dchgr\" (UID: \"4ee39beb-9369-4750-8c57-2afa1e96029e\") " pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.189485 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6e58-account-create-update-kp9vk"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.199878 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-k9llm"] Dec 10 15:38:19 crc 
kubenswrapper[4669]: I1210 15:38:19.202827 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:19 crc kubenswrapper[4669]: W1210 15:38:19.205175 4669 reflector.go:561] object-"openstack"/"keystone-keystone-dockercfg-9lkp5": failed to list *v1.Secret: secrets "keystone-keystone-dockercfg-9lkp5" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 10 15:38:19 crc kubenswrapper[4669]: E1210 15:38:19.205236 4669 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"keystone-keystone-dockercfg-9lkp5\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"keystone-keystone-dockercfg-9lkp5\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.206743 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-s4fqw"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.207311 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 10 15:38:19 crc kubenswrapper[4669]: W1210 15:38:19.214829 4669 reflector.go:561] object-"openstack"/"keystone-config-data": failed to list *v1.Secret: secrets "keystone-config-data" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.214855 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 10 15:38:19 crc kubenswrapper[4669]: E1210 15:38:19.214869 4669 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"keystone-config-data\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"keystone-config-data\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.233287 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khqkx\" (UniqueName: \"kubernetes.io/projected/4ee39beb-9369-4750-8c57-2afa1e96029e-kube-api-access-khqkx\") pod \"barbican-92c4-account-create-update-dchgr\" (UID: \"4ee39beb-9369-4750-8c57-2afa1e96029e\") " pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.250287 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-k9llm"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.262516 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-vhhn7"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.285119 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.291320 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5f2fd35-f21f-41d2-a254-3460f66af1f3-operator-scripts\") pod \"barbican-db-create-s4fqw\" (UID: \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\") " pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.291390 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k29wz\" (UniqueName: \"kubernetes.io/projected/f5f2fd35-f21f-41d2-a254-3460f66af1f3-kube-api-access-k29wz\") pod \"barbican-db-create-s4fqw\" (UID: \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\") " pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.291463 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5mh8\" (UniqueName: \"kubernetes.io/projected/639a743d-6218-4ed2-82b1-e898c97906e8-kube-api-access-v5mh8\") pod \"cinder-6e58-account-create-update-kp9vk\" (UID: \"639a743d-6218-4ed2-82b1-e898c97906e8\") " pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.291496 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/639a743d-6218-4ed2-82b1-e898c97906e8-operator-scripts\") pod \"cinder-6e58-account-create-update-kp9vk\" (UID: \"639a743d-6218-4ed2-82b1-e898c97906e8\") " pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.292250 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/639a743d-6218-4ed2-82b1-e898c97906e8-operator-scripts\") pod \"cinder-6e58-account-create-update-kp9vk\" (UID: \"639a743d-6218-4ed2-82b1-e898c97906e8\") " pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.292771 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5f2fd35-f21f-41d2-a254-3460f66af1f3-operator-scripts\") pod \"barbican-db-create-s4fqw\" (UID: \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\") " pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.295335 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-vhhn7"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.326958 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5mh8\" (UniqueName: \"kubernetes.io/projected/639a743d-6218-4ed2-82b1-e898c97906e8-kube-api-access-v5mh8\") pod \"cinder-6e58-account-create-update-kp9vk\" (UID: \"639a743d-6218-4ed2-82b1-e898c97906e8\") " pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.334775 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k29wz\" (UniqueName: \"kubernetes.io/projected/f5f2fd35-f21f-41d2-a254-3460f66af1f3-kube-api-access-k29wz\") pod \"barbican-db-create-s4fqw\" (UID: \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\") " pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.335668 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.395338 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-config-data\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.395418 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7125626d-5fb6-43fc-ade4-912d54fb0b76-operator-scripts\") pod \"neutron-db-create-vhhn7\" (UID: \"7125626d-5fb6-43fc-ade4-912d54fb0b76\") " pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.395455 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs4zb\" (UniqueName: \"kubernetes.io/projected/9f5ebc22-6893-415d-906b-bea9f82f18f4-kube-api-access-gs4zb\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.395492 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-combined-ca-bundle\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.395526 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blnm5\" (UniqueName: \"kubernetes.io/projected/7125626d-5fb6-43fc-ade4-912d54fb0b76-kube-api-access-blnm5\") pod \"neutron-db-create-vhhn7\" (UID: \"7125626d-5fb6-43fc-ade4-912d54fb0b76\") " pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.421625 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.477650 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.499679 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blnm5\" (UniqueName: \"kubernetes.io/projected/7125626d-5fb6-43fc-ade4-912d54fb0b76-kube-api-access-blnm5\") pod \"neutron-db-create-vhhn7\" (UID: \"7125626d-5fb6-43fc-ade4-912d54fb0b76\") " pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.499965 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-config-data\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.500010 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7125626d-5fb6-43fc-ade4-912d54fb0b76-operator-scripts\") pod \"neutron-db-create-vhhn7\" (UID: \"7125626d-5fb6-43fc-ade4-912d54fb0b76\") " pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.500071 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs4zb\" (UniqueName: \"kubernetes.io/projected/9f5ebc22-6893-415d-906b-bea9f82f18f4-kube-api-access-gs4zb\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.500115 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-combined-ca-bundle\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.501282 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7125626d-5fb6-43fc-ade4-912d54fb0b76-operator-scripts\") pod \"neutron-db-create-vhhn7\" (UID: \"7125626d-5fb6-43fc-ade4-912d54fb0b76\") " pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.524660 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs4zb\" (UniqueName: \"kubernetes.io/projected/9f5ebc22-6893-415d-906b-bea9f82f18f4-kube-api-access-gs4zb\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.528071 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-combined-ca-bundle\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.534840 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blnm5\" (UniqueName: \"kubernetes.io/projected/7125626d-5fb6-43fc-ade4-912d54fb0b76-kube-api-access-blnm5\") pod \"neutron-db-create-vhhn7\" (UID: \"7125626d-5fb6-43fc-ade4-912d54fb0b76\") " pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:19 crc 
kubenswrapper[4669]: I1210 15:38:19.612571 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.622731 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-a904-account-create-update-nz8ns"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.624499 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.632865 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.638175 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a904-account-create-update-nz8ns"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.750041 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" event={"ID":"8c5beee8-d55f-4b9a-bf26-1207a5a6264f","Type":"ContainerStarted","Data":"c7192d4b933a04bd5ce8ccdeadfc0c93684d06579ec03708ccf394b6f7616d7c"} Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.751010 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.785415 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" podStartSLOduration=2.785394477 podStartE2EDuration="2.785394477s" podCreationTimestamp="2025-12-10 15:38:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:38:19.784813263 +0000 UTC m=+1073.701759900" watchObservedRunningTime="2025-12-10 15:38:19.785394477 +0000 UTC m=+1073.702341104" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.815475 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm775\" (UniqueName: \"kubernetes.io/projected/c21c00ce-da75-42fb-998d-d41f116d076f-kube-api-access-fm775\") pod \"neutron-a904-account-create-update-nz8ns\" (UID: \"c21c00ce-da75-42fb-998d-d41f116d076f\") " pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.816512 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c21c00ce-da75-42fb-998d-d41f116d076f-operator-scripts\") pod \"neutron-a904-account-create-update-nz8ns\" (UID: \"c21c00ce-da75-42fb-998d-d41f116d076f\") " pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.865875 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-2xgm4"] Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.918823 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm775\" (UniqueName: \"kubernetes.io/projected/c21c00ce-da75-42fb-998d-d41f116d076f-kube-api-access-fm775\") pod \"neutron-a904-account-create-update-nz8ns\" (UID: \"c21c00ce-da75-42fb-998d-d41f116d076f\") " pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.918880 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c21c00ce-da75-42fb-998d-d41f116d076f-operator-scripts\") pod \"neutron-a904-account-create-update-nz8ns\" (UID: \"c21c00ce-da75-42fb-998d-d41f116d076f\") " pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.920658 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c21c00ce-da75-42fb-998d-d41f116d076f-operator-scripts\") pod \"neutron-a904-account-create-update-nz8ns\" (UID: \"c21c00ce-da75-42fb-998d-d41f116d076f\") " pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.939534 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm775\" (UniqueName: \"kubernetes.io/projected/c21c00ce-da75-42fb-998d-d41f116d076f-kube-api-access-fm775\") pod \"neutron-a904-account-create-update-nz8ns\" (UID: \"c21c00ce-da75-42fb-998d-d41f116d076f\") " pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:19 crc kubenswrapper[4669]: I1210 15:38:19.960765 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.099874 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-92c4-account-create-update-dchgr"] Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.194180 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-s4fqw"] Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.288135 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-vhhn7"] Dec 10 15:38:20 crc kubenswrapper[4669]: W1210 15:38:20.288617 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7125626d_5fb6_43fc_ade4_912d54fb0b76.slice/crio-bce2eba036919128e725bcc7ea06b909755f893d196e5994f5387fb49a26b0e5 WatchSource:0}: Error finding container bce2eba036919128e725bcc7ea06b909755f893d196e5994f5387fb49a26b0e5: Status 404 returned error can't find the container with id bce2eba036919128e725bcc7ea06b909755f893d196e5994f5387fb49a26b0e5 Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.314811 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-6e58-account-create-update-kp9vk"] Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.431492 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9lkp5" Dec 10 15:38:20 crc kubenswrapper[4669]: E1210 15:38:20.502913 4669 secret.go:188] Couldn't get secret openstack/keystone-config-data: failed to sync secret cache: timed out waiting for the condition Dec 10 15:38:20 crc kubenswrapper[4669]: E1210 15:38:20.502997 4669 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-config-data podName:9f5ebc22-6893-415d-906b-bea9f82f18f4 nodeName:}" failed. No retries permitted until 2025-12-10 15:38:21.002974709 +0000 UTC m=+1074.919921336 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-config-data") pod "keystone-db-sync-k9llm" (UID: "9f5ebc22-6893-415d-906b-bea9f82f18f4") : failed to sync secret cache: timed out waiting for the condition Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.507762 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-a904-account-create-update-nz8ns"] Dec 10 15:38:20 crc kubenswrapper[4669]: W1210 15:38:20.524574 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc21c00ce_da75_42fb_998d_d41f116d076f.slice/crio-db172c5b76d71b1864a039b948259801a852cb7f27060ad825494147972e165d WatchSource:0}: Error finding container db172c5b76d71b1864a039b948259801a852cb7f27060ad825494147972e165d: Status 404 returned error can't find the container with id db172c5b76d71b1864a039b948259801a852cb7f27060ad825494147972e165d Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.531000 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.767895 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-92c4-account-create-update-dchgr" event={"ID":"4ee39beb-9369-4750-8c57-2afa1e96029e","Type":"ContainerStarted","Data":"1d5c73f914f1acf2d5f510466c422834680ac55d4982b523dbc4ae1c165bd4e5"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.767940 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-92c4-account-create-update-dchgr" event={"ID":"4ee39beb-9369-4750-8c57-2afa1e96029e","Type":"ContainerStarted","Data":"d0cd616b53edd9f6c7d74d15468e043af24e1e616fc858e83526ff9b8c98ee0d"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.771922 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6e58-account-create-update-kp9vk" event={"ID":"639a743d-6218-4ed2-82b1-e898c97906e8","Type":"ContainerStarted","Data":"cefebf846931dc5c84e5e54db1cd0967eab1efd6ebadd970ad922b7e32206890"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.776444 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-s4fqw" event={"ID":"f5f2fd35-f21f-41d2-a254-3460f66af1f3","Type":"ContainerStarted","Data":"2d589bd7fa4cd27fc2e5da50066b57d7ee094b8a9f4829123f9abfafb9909c3f"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.776489 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-s4fqw" event={"ID":"f5f2fd35-f21f-41d2-a254-3460f66af1f3","Type":"ContainerStarted","Data":"e0bbc29e962064f20e8fd54474ea504de592eebd1b5f755ab128b4972b037dd5"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.779720 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a904-account-create-update-nz8ns" event={"ID":"c21c00ce-da75-42fb-998d-d41f116d076f","Type":"ContainerStarted","Data":"db172c5b76d71b1864a039b948259801a852cb7f27060ad825494147972e165d"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.782376 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vhhn7" event={"ID":"7125626d-5fb6-43fc-ade4-912d54fb0b76","Type":"ContainerStarted","Data":"1d9c2494a29ed905770e6ac5033471f8046970e9721c11e102815caaa6bd0b03"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.782419 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-db-create-vhhn7" event={"ID":"7125626d-5fb6-43fc-ade4-912d54fb0b76","Type":"ContainerStarted","Data":"bce2eba036919128e725bcc7ea06b909755f893d196e5994f5387fb49a26b0e5"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.784139 4669 generic.go:334] "Generic (PLEG): container finished" podID="55391db7-2483-46c6-9d71-0915d81eb5ee" containerID="634a6f021233b89894ca2fa0a80329f99cac1f892f38991df2ffa9cbc003dd55" exitCode=0 Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.784874 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-2xgm4" event={"ID":"55391db7-2483-46c6-9d71-0915d81eb5ee","Type":"ContainerDied","Data":"634a6f021233b89894ca2fa0a80329f99cac1f892f38991df2ffa9cbc003dd55"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.784902 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-2xgm4" event={"ID":"55391db7-2483-46c6-9d71-0915d81eb5ee","Type":"ContainerStarted","Data":"e953263704449d4935c344247be57f33b142f72d4795940b16de75aa29f37fc9"} Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.813315 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-92c4-account-create-update-dchgr" podStartSLOduration=2.813295594 podStartE2EDuration="2.813295594s" podCreationTimestamp="2025-12-10 15:38:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:38:20.797552334 +0000 UTC m=+1074.714498971" watchObservedRunningTime="2025-12-10 15:38:20.813295594 +0000 UTC m=+1074.730242221" Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.815284 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-s4fqw" podStartSLOduration=1.8152733620000001 podStartE2EDuration="1.815273362s" podCreationTimestamp="2025-12-10 15:38:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:38:20.811248102 +0000 UTC m=+1074.728194729" watchObservedRunningTime="2025-12-10 15:38:20.815273362 +0000 UTC m=+1074.732219989" Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.833229 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-create-vhhn7" podStartSLOduration=1.833194505 podStartE2EDuration="1.833194505s" podCreationTimestamp="2025-12-10 15:38:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:38:20.829455142 +0000 UTC m=+1074.746401769" watchObservedRunningTime="2025-12-10 15:38:20.833194505 +0000 UTC m=+1074.750141132" Dec 10 15:38:20 crc kubenswrapper[4669]: I1210 15:38:20.873183 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-6e58-account-create-update-kp9vk" podStartSLOduration=1.873167982 podStartE2EDuration="1.873167982s" podCreationTimestamp="2025-12-10 15:38:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:38:20.868079437 +0000 UTC m=+1074.785026064" watchObservedRunningTime="2025-12-10 15:38:20.873167982 +0000 UTC m=+1074.790114609" Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.039002 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-config-data\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.044954 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-config-data\") pod \"keystone-db-sync-k9llm\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.317864 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.807600 4669 generic.go:334] "Generic (PLEG): container finished" podID="f5f2fd35-f21f-41d2-a254-3460f66af1f3" containerID="2d589bd7fa4cd27fc2e5da50066b57d7ee094b8a9f4829123f9abfafb9909c3f" exitCode=0 Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.807702 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-s4fqw" event={"ID":"f5f2fd35-f21f-41d2-a254-3460f66af1f3","Type":"ContainerDied","Data":"2d589bd7fa4cd27fc2e5da50066b57d7ee094b8a9f4829123f9abfafb9909c3f"} Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.809924 4669 generic.go:334] "Generic (PLEG): container finished" podID="c21c00ce-da75-42fb-998d-d41f116d076f" containerID="a5e1ce484fe54aa0d64c4bcd7705e74501dd705c5fd85c8f02db5e44be07ec2c" exitCode=0 Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.810284 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a904-account-create-update-nz8ns" event={"ID":"c21c00ce-da75-42fb-998d-d41f116d076f","Type":"ContainerDied","Data":"a5e1ce484fe54aa0d64c4bcd7705e74501dd705c5fd85c8f02db5e44be07ec2c"} Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.812911 4669 generic.go:334] "Generic (PLEG): container finished" podID="7125626d-5fb6-43fc-ade4-912d54fb0b76" containerID="1d9c2494a29ed905770e6ac5033471f8046970e9721c11e102815caaa6bd0b03" exitCode=0 Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.812998 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vhhn7" event={"ID":"7125626d-5fb6-43fc-ade4-912d54fb0b76","Type":"ContainerDied","Data":"1d9c2494a29ed905770e6ac5033471f8046970e9721c11e102815caaa6bd0b03"} Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.814639 4669 generic.go:334] "Generic (PLEG): container finished" podID="4ee39beb-9369-4750-8c57-2afa1e96029e" containerID="1d5c73f914f1acf2d5f510466c422834680ac55d4982b523dbc4ae1c165bd4e5" exitCode=0 Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.814733 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-92c4-account-create-update-dchgr" event={"ID":"4ee39beb-9369-4750-8c57-2afa1e96029e","Type":"ContainerDied","Data":"1d5c73f914f1acf2d5f510466c422834680ac55d4982b523dbc4ae1c165bd4e5"} Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.816526 4669 generic.go:334] "Generic (PLEG): container finished" podID="639a743d-6218-4ed2-82b1-e898c97906e8" containerID="b80f73ad83f7ad7ab81d4fecef4f7e80e06a0c5defaff50d432db01970dfca13" exitCode=0 Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.817745 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6e58-account-create-update-kp9vk" 
event={"ID":"639a743d-6218-4ed2-82b1-e898c97906e8","Type":"ContainerDied","Data":"b80f73ad83f7ad7ab81d4fecef4f7e80e06a0c5defaff50d432db01970dfca13"} Dec 10 15:38:21 crc kubenswrapper[4669]: I1210 15:38:21.861004 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-k9llm"] Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.188351 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.362338 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zzqc5\" (UniqueName: \"kubernetes.io/projected/55391db7-2483-46c6-9d71-0915d81eb5ee-kube-api-access-zzqc5\") pod \"55391db7-2483-46c6-9d71-0915d81eb5ee\" (UID: \"55391db7-2483-46c6-9d71-0915d81eb5ee\") " Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.362456 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55391db7-2483-46c6-9d71-0915d81eb5ee-operator-scripts\") pod \"55391db7-2483-46c6-9d71-0915d81eb5ee\" (UID: \"55391db7-2483-46c6-9d71-0915d81eb5ee\") " Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.362938 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/55391db7-2483-46c6-9d71-0915d81eb5ee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "55391db7-2483-46c6-9d71-0915d81eb5ee" (UID: "55391db7-2483-46c6-9d71-0915d81eb5ee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.363646 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/55391db7-2483-46c6-9d71-0915d81eb5ee-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.369726 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55391db7-2483-46c6-9d71-0915d81eb5ee-kube-api-access-zzqc5" (OuterVolumeSpecName: "kube-api-access-zzqc5") pod "55391db7-2483-46c6-9d71-0915d81eb5ee" (UID: "55391db7-2483-46c6-9d71-0915d81eb5ee"). InnerVolumeSpecName "kube-api-access-zzqc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.465629 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zzqc5\" (UniqueName: \"kubernetes.io/projected/55391db7-2483-46c6-9d71-0915d81eb5ee-kube-api-access-zzqc5\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.826122 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k9llm" event={"ID":"9f5ebc22-6893-415d-906b-bea9f82f18f4","Type":"ContainerStarted","Data":"621093ef15d23340a76e3bff91ef4b0e0ab799f95f8bda42b77867edf532553a"} Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.828924 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-2xgm4" Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.829834 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-2xgm4" event={"ID":"55391db7-2483-46c6-9d71-0915d81eb5ee","Type":"ContainerDied","Data":"e953263704449d4935c344247be57f33b142f72d4795940b16de75aa29f37fc9"} Dec 10 15:38:22 crc kubenswrapper[4669]: I1210 15:38:22.829901 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e953263704449d4935c344247be57f33b142f72d4795940b16de75aa29f37fc9" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.248906 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.279150 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/639a743d-6218-4ed2-82b1-e898c97906e8-operator-scripts\") pod \"639a743d-6218-4ed2-82b1-e898c97906e8\" (UID: \"639a743d-6218-4ed2-82b1-e898c97906e8\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.279199 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v5mh8\" (UniqueName: \"kubernetes.io/projected/639a743d-6218-4ed2-82b1-e898c97906e8-kube-api-access-v5mh8\") pod \"639a743d-6218-4ed2-82b1-e898c97906e8\" (UID: \"639a743d-6218-4ed2-82b1-e898c97906e8\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.279947 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/639a743d-6218-4ed2-82b1-e898c97906e8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "639a743d-6218-4ed2-82b1-e898c97906e8" (UID: "639a743d-6218-4ed2-82b1-e898c97906e8"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.297425 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/639a743d-6218-4ed2-82b1-e898c97906e8-kube-api-access-v5mh8" (OuterVolumeSpecName: "kube-api-access-v5mh8") pod "639a743d-6218-4ed2-82b1-e898c97906e8" (UID: "639a743d-6218-4ed2-82b1-e898c97906e8"). InnerVolumeSpecName "kube-api-access-v5mh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.381123 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/639a743d-6218-4ed2-82b1-e898c97906e8-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.381152 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v5mh8\" (UniqueName: \"kubernetes.io/projected/639a743d-6218-4ed2-82b1-e898c97906e8-kube-api-access-v5mh8\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.494771 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.504337 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.516025 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.536389 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.684741 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7125626d-5fb6-43fc-ade4-912d54fb0b76-operator-scripts\") pod \"7125626d-5fb6-43fc-ade4-912d54fb0b76\" (UID: \"7125626d-5fb6-43fc-ade4-912d54fb0b76\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.684837 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ee39beb-9369-4750-8c57-2afa1e96029e-operator-scripts\") pod \"4ee39beb-9369-4750-8c57-2afa1e96029e\" (UID: \"4ee39beb-9369-4750-8c57-2afa1e96029e\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.684898 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c21c00ce-da75-42fb-998d-d41f116d076f-operator-scripts\") pod \"c21c00ce-da75-42fb-998d-d41f116d076f\" (UID: \"c21c00ce-da75-42fb-998d-d41f116d076f\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.684925 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blnm5\" (UniqueName: \"kubernetes.io/projected/7125626d-5fb6-43fc-ade4-912d54fb0b76-kube-api-access-blnm5\") pod \"7125626d-5fb6-43fc-ade4-912d54fb0b76\" (UID: \"7125626d-5fb6-43fc-ade4-912d54fb0b76\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.684968 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5f2fd35-f21f-41d2-a254-3460f66af1f3-operator-scripts\") pod \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\" (UID: \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.685004 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k29wz\" (UniqueName: \"kubernetes.io/projected/f5f2fd35-f21f-41d2-a254-3460f66af1f3-kube-api-access-k29wz\") pod \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\" (UID: \"f5f2fd35-f21f-41d2-a254-3460f66af1f3\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.685090 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khqkx\" (UniqueName: \"kubernetes.io/projected/4ee39beb-9369-4750-8c57-2afa1e96029e-kube-api-access-khqkx\") pod \"4ee39beb-9369-4750-8c57-2afa1e96029e\" (UID: \"4ee39beb-9369-4750-8c57-2afa1e96029e\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.685157 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fm775\" (UniqueName: \"kubernetes.io/projected/c21c00ce-da75-42fb-998d-d41f116d076f-kube-api-access-fm775\") pod \"c21c00ce-da75-42fb-998d-d41f116d076f\" (UID: \"c21c00ce-da75-42fb-998d-d41f116d076f\") " Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.685960 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c21c00ce-da75-42fb-998d-d41f116d076f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c21c00ce-da75-42fb-998d-d41f116d076f" (UID: "c21c00ce-da75-42fb-998d-d41f116d076f"). 
InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.686027 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5f2fd35-f21f-41d2-a254-3460f66af1f3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f5f2fd35-f21f-41d2-a254-3460f66af1f3" (UID: "f5f2fd35-f21f-41d2-a254-3460f66af1f3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.686122 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7125626d-5fb6-43fc-ade4-912d54fb0b76-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "7125626d-5fb6-43fc-ade4-912d54fb0b76" (UID: "7125626d-5fb6-43fc-ade4-912d54fb0b76"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.686655 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ee39beb-9369-4750-8c57-2afa1e96029e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4ee39beb-9369-4750-8c57-2afa1e96029e" (UID: "4ee39beb-9369-4750-8c57-2afa1e96029e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.689489 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c21c00ce-da75-42fb-998d-d41f116d076f-kube-api-access-fm775" (OuterVolumeSpecName: "kube-api-access-fm775") pod "c21c00ce-da75-42fb-998d-d41f116d076f" (UID: "c21c00ce-da75-42fb-998d-d41f116d076f"). InnerVolumeSpecName "kube-api-access-fm775". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.689750 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ee39beb-9369-4750-8c57-2afa1e96029e-kube-api-access-khqkx" (OuterVolumeSpecName: "kube-api-access-khqkx") pod "4ee39beb-9369-4750-8c57-2afa1e96029e" (UID: "4ee39beb-9369-4750-8c57-2afa1e96029e"). InnerVolumeSpecName "kube-api-access-khqkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.691210 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5f2fd35-f21f-41d2-a254-3460f66af1f3-kube-api-access-k29wz" (OuterVolumeSpecName: "kube-api-access-k29wz") pod "f5f2fd35-f21f-41d2-a254-3460f66af1f3" (UID: "f5f2fd35-f21f-41d2-a254-3460f66af1f3"). InnerVolumeSpecName "kube-api-access-k29wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.710659 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7125626d-5fb6-43fc-ade4-912d54fb0b76-kube-api-access-blnm5" (OuterVolumeSpecName: "kube-api-access-blnm5") pod "7125626d-5fb6-43fc-ade4-912d54fb0b76" (UID: "7125626d-5fb6-43fc-ade4-912d54fb0b76"). InnerVolumeSpecName "kube-api-access-blnm5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.787477 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k29wz\" (UniqueName: \"kubernetes.io/projected/f5f2fd35-f21f-41d2-a254-3460f66af1f3-kube-api-access-k29wz\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.788602 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khqkx\" (UniqueName: \"kubernetes.io/projected/4ee39beb-9369-4750-8c57-2afa1e96029e-kube-api-access-khqkx\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.788722 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fm775\" (UniqueName: \"kubernetes.io/projected/c21c00ce-da75-42fb-998d-d41f116d076f-kube-api-access-fm775\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.788900 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7125626d-5fb6-43fc-ade4-912d54fb0b76-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.789098 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ee39beb-9369-4750-8c57-2afa1e96029e-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.789240 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c21c00ce-da75-42fb-998d-d41f116d076f-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.789375 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blnm5\" (UniqueName: \"kubernetes.io/projected/7125626d-5fb6-43fc-ade4-912d54fb0b76-kube-api-access-blnm5\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.789511 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f5f2fd35-f21f-41d2-a254-3460f66af1f3-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.840337 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-s4fqw" event={"ID":"f5f2fd35-f21f-41d2-a254-3460f66af1f3","Type":"ContainerDied","Data":"e0bbc29e962064f20e8fd54474ea504de592eebd1b5f755ab128b4972b037dd5"} Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.840967 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e0bbc29e962064f20e8fd54474ea504de592eebd1b5f755ab128b4972b037dd5" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.841023 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-s4fqw" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.858360 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-a904-account-create-update-nz8ns" event={"ID":"c21c00ce-da75-42fb-998d-d41f116d076f","Type":"ContainerDied","Data":"db172c5b76d71b1864a039b948259801a852cb7f27060ad825494147972e165d"} Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.858747 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db172c5b76d71b1864a039b948259801a852cb7f27060ad825494147972e165d" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.858695 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-a904-account-create-update-nz8ns" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.860947 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vhhn7" event={"ID":"7125626d-5fb6-43fc-ade4-912d54fb0b76","Type":"ContainerDied","Data":"bce2eba036919128e725bcc7ea06b909755f893d196e5994f5387fb49a26b0e5"} Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.860987 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bce2eba036919128e725bcc7ea06b909755f893d196e5994f5387fb49a26b0e5" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.860953 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vhhn7" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.862866 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-92c4-account-create-update-dchgr" event={"ID":"4ee39beb-9369-4750-8c57-2afa1e96029e","Type":"ContainerDied","Data":"d0cd616b53edd9f6c7d74d15468e043af24e1e616fc858e83526ff9b8c98ee0d"} Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.862893 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d0cd616b53edd9f6c7d74d15468e043af24e1e616fc858e83526ff9b8c98ee0d" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.862968 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-92c4-account-create-update-dchgr" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.873082 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-6e58-account-create-update-kp9vk" event={"ID":"639a743d-6218-4ed2-82b1-e898c97906e8","Type":"ContainerDied","Data":"cefebf846931dc5c84e5e54db1cd0967eab1efd6ebadd970ad922b7e32206890"} Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.873115 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cefebf846931dc5c84e5e54db1cd0967eab1efd6ebadd970ad922b7e32206890" Dec 10 15:38:23 crc kubenswrapper[4669]: I1210 15:38:23.873170 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-6e58-account-create-update-kp9vk" Dec 10 15:38:23 crc kubenswrapper[4669]: E1210 15:38:23.890578 4669 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf5f2fd35_f21f_41d2_a254_3460f66af1f3.slice/crio-e0bbc29e962064f20e8fd54474ea504de592eebd1b5f755ab128b4972b037dd5\": RecentStats: unable to find data in memory cache]" Dec 10 15:38:27 crc kubenswrapper[4669]: I1210 15:38:27.553440 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" Dec 10 15:38:27 crc kubenswrapper[4669]: I1210 15:38:27.625085 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-hrrzd"] Dec 10 15:38:27 crc kubenswrapper[4669]: I1210 15:38:27.625403 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-hrrzd" podUID="e80eb23e-ed94-41a4-ba96-ae286b6b2bca" containerName="dnsmasq-dns" containerID="cri-o://4852aa868ecee3c0c2b23541cc99e3325687508237051e4c1c74a39c2701436b" gracePeriod=10 Dec 10 15:38:28 crc kubenswrapper[4669]: I1210 15:38:28.956998 4669 generic.go:334] "Generic (PLEG): container finished" podID="e80eb23e-ed94-41a4-ba96-ae286b6b2bca" containerID="4852aa868ecee3c0c2b23541cc99e3325687508237051e4c1c74a39c2701436b" exitCode=0 Dec 10 15:38:28 crc kubenswrapper[4669]: I1210 15:38:28.957250 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-hrrzd" event={"ID":"e80eb23e-ed94-41a4-ba96-ae286b6b2bca","Type":"ContainerDied","Data":"4852aa868ecee3c0c2b23541cc99e3325687508237051e4c1c74a39c2701436b"} Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.105601 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.208469 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-sb\") pod \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.208566 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-config\") pod \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.208690 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-dns-svc\") pod \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.208717 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-nb\") pod \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.208749 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q9txd\" (UniqueName: \"kubernetes.io/projected/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-kube-api-access-q9txd\") pod \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\" (UID: \"e80eb23e-ed94-41a4-ba96-ae286b6b2bca\") " Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.214366 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-kube-api-access-q9txd" (OuterVolumeSpecName: "kube-api-access-q9txd") pod "e80eb23e-ed94-41a4-ba96-ae286b6b2bca" (UID: "e80eb23e-ed94-41a4-ba96-ae286b6b2bca"). InnerVolumeSpecName "kube-api-access-q9txd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.252899 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e80eb23e-ed94-41a4-ba96-ae286b6b2bca" (UID: "e80eb23e-ed94-41a4-ba96-ae286b6b2bca"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.257983 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e80eb23e-ed94-41a4-ba96-ae286b6b2bca" (UID: "e80eb23e-ed94-41a4-ba96-ae286b6b2bca"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.262440 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-config" (OuterVolumeSpecName: "config") pod "e80eb23e-ed94-41a4-ba96-ae286b6b2bca" (UID: "e80eb23e-ed94-41a4-ba96-ae286b6b2bca"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.273134 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e80eb23e-ed94-41a4-ba96-ae286b6b2bca" (UID: "e80eb23e-ed94-41a4-ba96-ae286b6b2bca"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.310639 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.310672 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.310683 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.310692 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.310702 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q9txd\" (UniqueName: \"kubernetes.io/projected/e80eb23e-ed94-41a4-ba96-ae286b6b2bca-kube-api-access-q9txd\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.966284 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-hrrzd" event={"ID":"e80eb23e-ed94-41a4-ba96-ae286b6b2bca","Type":"ContainerDied","Data":"ef5d8998a206118bc7b07a7f1d44dc49124a25fe8e5e363c52d357a2f2cc1735"} Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.966616 4669 scope.go:117] "RemoveContainer" containerID="4852aa868ecee3c0c2b23541cc99e3325687508237051e4c1c74a39c2701436b" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.966748 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-hrrzd" Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.975347 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k9llm" event={"ID":"9f5ebc22-6893-415d-906b-bea9f82f18f4","Type":"ContainerStarted","Data":"b6e7fc81b5a184992accb517e648b13e03db3428eaaf7b672c5d9e87023e6adf"} Dec 10 15:38:29 crc kubenswrapper[4669]: I1210 15:38:29.992646 4669 scope.go:117] "RemoveContainer" containerID="ccf41bac5a763b89aea2c4e697418c1b1a20f12a4fe884d831bfa2aa10c178f3" Dec 10 15:38:30 crc kubenswrapper[4669]: I1210 15:38:30.005164 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-k9llm" podStartSLOduration=4.058507384 podStartE2EDuration="11.005146403s" podCreationTimestamp="2025-12-10 15:38:19 +0000 UTC" firstStartedPulling="2025-12-10 15:38:21.855627886 +0000 UTC m=+1075.772574503" lastFinishedPulling="2025-12-10 15:38:28.802266895 +0000 UTC m=+1082.719213522" observedRunningTime="2025-12-10 15:38:30.002875027 +0000 UTC m=+1083.919821654" watchObservedRunningTime="2025-12-10 15:38:30.005146403 +0000 UTC m=+1083.922093030" Dec 10 15:38:30 crc kubenswrapper[4669]: I1210 15:38:30.029753 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-hrrzd"] Dec 10 15:38:30 crc kubenswrapper[4669]: I1210 15:38:30.037059 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-hrrzd"] Dec 10 15:38:30 crc kubenswrapper[4669]: I1210 15:38:30.407887 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e80eb23e-ed94-41a4-ba96-ae286b6b2bca" path="/var/lib/kubelet/pods/e80eb23e-ed94-41a4-ba96-ae286b6b2bca/volumes" Dec 10 15:38:33 crc kubenswrapper[4669]: I1210 15:38:33.012254 4669 generic.go:334] "Generic (PLEG): container finished" podID="9f5ebc22-6893-415d-906b-bea9f82f18f4" containerID="b6e7fc81b5a184992accb517e648b13e03db3428eaaf7b672c5d9e87023e6adf" exitCode=0 Dec 10 15:38:33 crc kubenswrapper[4669]: I1210 15:38:33.012341 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k9llm" event={"ID":"9f5ebc22-6893-415d-906b-bea9f82f18f4","Type":"ContainerDied","Data":"b6e7fc81b5a184992accb517e648b13e03db3428eaaf7b672c5d9e87023e6adf"} Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.382112 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.493742 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-config-data\") pod \"9f5ebc22-6893-415d-906b-bea9f82f18f4\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.493792 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-combined-ca-bundle\") pod \"9f5ebc22-6893-415d-906b-bea9f82f18f4\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.493973 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gs4zb\" (UniqueName: \"kubernetes.io/projected/9f5ebc22-6893-415d-906b-bea9f82f18f4-kube-api-access-gs4zb\") pod \"9f5ebc22-6893-415d-906b-bea9f82f18f4\" (UID: \"9f5ebc22-6893-415d-906b-bea9f82f18f4\") " Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.510060 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f5ebc22-6893-415d-906b-bea9f82f18f4-kube-api-access-gs4zb" (OuterVolumeSpecName: "kube-api-access-gs4zb") pod "9f5ebc22-6893-415d-906b-bea9f82f18f4" (UID: "9f5ebc22-6893-415d-906b-bea9f82f18f4"). InnerVolumeSpecName "kube-api-access-gs4zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.521157 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9f5ebc22-6893-415d-906b-bea9f82f18f4" (UID: "9f5ebc22-6893-415d-906b-bea9f82f18f4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.540956 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-config-data" (OuterVolumeSpecName: "config-data") pod "9f5ebc22-6893-415d-906b-bea9f82f18f4" (UID: "9f5ebc22-6893-415d-906b-bea9f82f18f4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.604782 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gs4zb\" (UniqueName: \"kubernetes.io/projected/9f5ebc22-6893-415d-906b-bea9f82f18f4-kube-api-access-gs4zb\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.604823 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:34 crc kubenswrapper[4669]: I1210 15:38:34.604839 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f5ebc22-6893-415d-906b-bea9f82f18f4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.028512 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-k9llm" event={"ID":"9f5ebc22-6893-415d-906b-bea9f82f18f4","Type":"ContainerDied","Data":"621093ef15d23340a76e3bff91ef4b0e0ab799f95f8bda42b77867edf532553a"} Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.028557 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="621093ef15d23340a76e3bff91ef4b0e0ab799f95f8bda42b77867edf532553a" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.028588 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-k9llm" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.243435 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-67795cd9-6gfbf"] Dec 10 15:38:35 crc kubenswrapper[4669]: E1210 15:38:35.244057 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55391db7-2483-46c6-9d71-0915d81eb5ee" containerName="mariadb-database-create" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244080 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="55391db7-2483-46c6-9d71-0915d81eb5ee" containerName="mariadb-database-create" Dec 10 15:38:35 crc kubenswrapper[4669]: E1210 15:38:35.244096 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5f2fd35-f21f-41d2-a254-3460f66af1f3" containerName="mariadb-database-create" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244104 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5f2fd35-f21f-41d2-a254-3460f66af1f3" containerName="mariadb-database-create" Dec 10 15:38:35 crc kubenswrapper[4669]: E1210 15:38:35.244120 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c21c00ce-da75-42fb-998d-d41f116d076f" containerName="mariadb-account-create-update" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244128 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="c21c00ce-da75-42fb-998d-d41f116d076f" containerName="mariadb-account-create-update" Dec 10 15:38:35 crc kubenswrapper[4669]: E1210 15:38:35.244143 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee39beb-9369-4750-8c57-2afa1e96029e" containerName="mariadb-account-create-update" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244150 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee39beb-9369-4750-8c57-2afa1e96029e" containerName="mariadb-account-create-update" Dec 10 15:38:35 crc kubenswrapper[4669]: E1210 15:38:35.244164 4669 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="639a743d-6218-4ed2-82b1-e898c97906e8" containerName="mariadb-account-create-update" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244172 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="639a743d-6218-4ed2-82b1-e898c97906e8" containerName="mariadb-account-create-update" Dec 10 15:38:35 crc kubenswrapper[4669]: E1210 15:38:35.244181 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e80eb23e-ed94-41a4-ba96-ae286b6b2bca" containerName="init" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244187 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e80eb23e-ed94-41a4-ba96-ae286b6b2bca" containerName="init" Dec 10 15:38:35 crc kubenswrapper[4669]: E1210 15:38:35.244197 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7125626d-5fb6-43fc-ade4-912d54fb0b76" containerName="mariadb-database-create" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244203 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="7125626d-5fb6-43fc-ade4-912d54fb0b76" containerName="mariadb-database-create" Dec 10 15:38:35 crc kubenswrapper[4669]: E1210 15:38:35.244237 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e80eb23e-ed94-41a4-ba96-ae286b6b2bca" containerName="dnsmasq-dns" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244245 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e80eb23e-ed94-41a4-ba96-ae286b6b2bca" containerName="dnsmasq-dns" Dec 10 15:38:35 crc kubenswrapper[4669]: E1210 15:38:35.244256 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f5ebc22-6893-415d-906b-bea9f82f18f4" containerName="keystone-db-sync" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244261 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f5ebc22-6893-415d-906b-bea9f82f18f4" containerName="keystone-db-sync" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244402 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ee39beb-9369-4750-8c57-2afa1e96029e" containerName="mariadb-account-create-update" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244416 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f5ebc22-6893-415d-906b-bea9f82f18f4" containerName="keystone-db-sync" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244425 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="c21c00ce-da75-42fb-998d-d41f116d076f" containerName="mariadb-account-create-update" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244434 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="55391db7-2483-46c6-9d71-0915d81eb5ee" containerName="mariadb-database-create" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244442 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e80eb23e-ed94-41a4-ba96-ae286b6b2bca" containerName="dnsmasq-dns" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244453 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="7125626d-5fb6-43fc-ade4-912d54fb0b76" containerName="mariadb-database-create" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244463 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5f2fd35-f21f-41d2-a254-3460f66af1f3" containerName="mariadb-database-create" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.244472 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="639a743d-6218-4ed2-82b1-e898c97906e8" 
containerName="mariadb-account-create-update" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.245354 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.258250 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-xlxc9"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.264098 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.275395 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.275584 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.275953 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9lkp5" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.280456 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.302051 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.302085 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-6gfbf"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.308337 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xlxc9"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420187 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfcsh\" (UniqueName: \"kubernetes.io/projected/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-kube-api-access-wfcsh\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420264 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-config\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420285 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-credential-keys\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420311 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-scripts\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420329 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-dns-svc\") pod 
\"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420354 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420375 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-combined-ca-bundle\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420393 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7hdl\" (UniqueName: \"kubernetes.io/projected/f76ac545-01b9-49f4-a709-7448d8398622-kube-api-access-x7hdl\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420410 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420461 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-fernet-keys\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.420492 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-config-data\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.478997 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-jj4m7"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.479985 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.481847 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-rvbcm" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.482276 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.485475 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.523890 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-fernet-keys\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.523962 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-config-data\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.523996 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfcsh\" (UniqueName: \"kubernetes.io/projected/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-kube-api-access-wfcsh\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.524046 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-config\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.524062 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-credential-keys\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.524096 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-scripts\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.524113 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-dns-svc\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.524824 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-dns-svc\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" 
Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.525768 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.525815 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-combined-ca-bundle\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.525842 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7hdl\" (UniqueName: \"kubernetes.io/projected/f76ac545-01b9-49f4-a709-7448d8398622-kube-api-access-x7hdl\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.525881 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.526835 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-nb\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.533485 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-sb\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.534806 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-config\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.534914 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-jj4m7"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.539050 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-scripts\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.539705 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-config-data\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 
15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.547996 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-combined-ca-bundle\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.553846 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-credential-keys\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.555357 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-fernet-keys\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.607936 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7hdl\" (UniqueName: \"kubernetes.io/projected/f76ac545-01b9-49f4-a709-7448d8398622-kube-api-access-x7hdl\") pod \"keystone-bootstrap-xlxc9\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.608056 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfcsh\" (UniqueName: \"kubernetes.io/projected/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-kube-api-access-wfcsh\") pod \"dnsmasq-dns-67795cd9-6gfbf\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.608396 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.629506 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-config\") pod \"neutron-db-sync-jj4m7\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.629583 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-combined-ca-bundle\") pod \"neutron-db-sync-jj4m7\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.629645 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5dll\" (UniqueName: \"kubernetes.io/projected/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-kube-api-access-c5dll\") pod \"neutron-db-sync-jj4m7\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.669282 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-6gfbf"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.669883 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.723414 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-j8blj"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.725124 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.735757 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rdnr\" (UniqueName: \"kubernetes.io/projected/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-kube-api-access-2rdnr\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.735805 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-combined-ca-bundle\") pod \"neutron-db-sync-jj4m7\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.735841 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-config\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.735869 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.735891 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.735945 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5dll\" (UniqueName: \"kubernetes.io/projected/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-kube-api-access-c5dll\") pod \"neutron-db-sync-jj4m7\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.735980 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.736022 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-config\") pod \"neutron-db-sync-jj4m7\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " 
pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.745858 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-config\") pod \"neutron-db-sync-jj4m7\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.749555 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-combined-ca-bundle\") pod \"neutron-db-sync-jj4m7\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.753122 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-hqkqq"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.754229 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.765126 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-q2jg7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.765368 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.765722 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.779959 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5dll\" (UniqueName: \"kubernetes.io/projected/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-kube-api-access-c5dll\") pod \"neutron-db-sync-jj4m7\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.783321 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-j8blj"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.802718 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.803239 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-hqkqq"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.839932 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-config\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.839978 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.839996 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.840047 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.840115 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rdnr\" (UniqueName: \"kubernetes.io/projected/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-kube-api-access-2rdnr\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.841197 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-config\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.841787 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-dns-svc\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.856171 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-nb\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.856279 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-8cqw2"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.857293 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-sb\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.857644 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.864370 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.864566 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.864699 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-f45n4" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.878845 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-7w6wd"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.880389 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.896700 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.896878 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-59rnr" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.914070 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-8cqw2"] Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.953045 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-scripts\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.953136 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-logs\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.953191 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-config-data\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.953296 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cblx8\" (UniqueName: \"kubernetes.io/projected/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-kube-api-access-cblx8\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.953332 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-combined-ca-bundle\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.960123 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rdnr\" (UniqueName: \"kubernetes.io/projected/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-kube-api-access-2rdnr\") pod \"dnsmasq-dns-5b6dbdb6f5-j8blj\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") " pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:35 crc kubenswrapper[4669]: I1210 15:38:35.977948 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7w6wd"] Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.022103 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.044620 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.050157 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.050413 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099737 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-db-sync-config-data\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099779 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-scripts\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099821 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktbrr\" (UniqueName: \"kubernetes.io/projected/c69a866b-0a6c-446d-aeea-24a9a6e95efa-kube-api-access-ktbrr\") pod \"barbican-db-sync-7w6wd\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") " pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099839 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099859 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm44b\" (UniqueName: \"kubernetes.io/projected/20e32d2e-0738-45f0-bb91-b8e48694928b-kube-api-access-xm44b\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099881 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-scripts\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099919 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099939 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-db-sync-config-data\") pod \"barbican-db-sync-7w6wd\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") " pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099962 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-scripts\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.099992 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-logs\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.100011 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-config-data\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.100031 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20e32d2e-0738-45f0-bb91-b8e48694928b-etc-machine-id\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.100047 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-config-data\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.100078 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9r6s9\" (UniqueName: \"kubernetes.io/projected/e57dc4fd-35d5-4151-b620-7903e12be753-kube-api-access-9r6s9\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.100100 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-config-data\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " 
pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.100131 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-combined-ca-bundle\") pod \"barbican-db-sync-7w6wd\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") " pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.100157 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-log-httpd\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.100185 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-combined-ca-bundle\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.104956 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.111812 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-run-httpd\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.111899 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cblx8\" (UniqueName: \"kubernetes.io/projected/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-kube-api-access-cblx8\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.111973 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-combined-ca-bundle\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.112590 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-logs\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.131413 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-config-data\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.141203 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-scripts\") pod \"placement-db-sync-hqkqq\" (UID: 
\"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.142415 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-combined-ca-bundle\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.156348 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.175784 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cblx8\" (UniqueName: \"kubernetes.io/projected/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-kube-api-access-cblx8\") pod \"placement-db-sync-hqkqq\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") " pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216340 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-run-httpd\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216674 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-db-sync-config-data\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216701 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-scripts\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216718 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216735 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktbrr\" (UniqueName: \"kubernetes.io/projected/c69a866b-0a6c-446d-aeea-24a9a6e95efa-kube-api-access-ktbrr\") pod \"barbican-db-sync-7w6wd\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") " pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216754 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm44b\" (UniqueName: \"kubernetes.io/projected/20e32d2e-0738-45f0-bb91-b8e48694928b-kube-api-access-xm44b\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216769 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-scripts\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" 
Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216795 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216811 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-db-sync-config-data\") pod \"barbican-db-sync-7w6wd\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") " pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216841 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-config-data\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216859 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20e32d2e-0738-45f0-bb91-b8e48694928b-etc-machine-id\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216873 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-config-data\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216898 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9r6s9\" (UniqueName: \"kubernetes.io/projected/e57dc4fd-35d5-4151-b620-7903e12be753-kube-api-access-9r6s9\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216933 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-combined-ca-bundle\") pod \"barbican-db-sync-7w6wd\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") " pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216957 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-log-httpd\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.216981 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-combined-ca-bundle\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.236766 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-run-httpd\") pod 
\"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.250900 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20e32d2e-0738-45f0-bb91-b8e48694928b-etc-machine-id\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.251862 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-log-httpd\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.255183 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-scripts\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.259571 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-db-sync-config-data\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.276962 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-scripts\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.277324 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-combined-ca-bundle\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.278469 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.279016 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-combined-ca-bundle\") pod \"barbican-db-sync-7w6wd\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") " pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.279303 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-config-data\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.281161 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-config-data\") pod 
\"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.281567 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.298440 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktbrr\" (UniqueName: \"kubernetes.io/projected/c69a866b-0a6c-446d-aeea-24a9a6e95efa-kube-api-access-ktbrr\") pod \"barbican-db-sync-7w6wd\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") " pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.300401 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9r6s9\" (UniqueName: \"kubernetes.io/projected/e57dc4fd-35d5-4151-b620-7903e12be753-kube-api-access-9r6s9\") pod \"ceilometer-0\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.307121 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm44b\" (UniqueName: \"kubernetes.io/projected/20e32d2e-0738-45f0-bb91-b8e48694928b-kube-api-access-xm44b\") pod \"cinder-db-sync-8cqw2\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.307475 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-db-sync-config-data\") pod \"barbican-db-sync-7w6wd\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") " pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.418359 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hqkqq" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.542297 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.569345 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:38:36 crc kubenswrapper[4669]: I1210 15:38:36.582633 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:38:37 crc kubenswrapper[4669]: I1210 15:38:36.634131 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-jj4m7"] Dec 10 15:38:37 crc kubenswrapper[4669]: I1210 15:38:36.755396 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-6gfbf"] Dec 10 15:38:37 crc kubenswrapper[4669]: W1210 15:38:36.795283 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bca29e3_7697_4d29_b7f7_36d0fb0af5da.slice/crio-ba7cfd2511ff0e8432b138b1881e258bb1c9d1f2a528e446223033a593f17df8 WatchSource:0}: Error finding container ba7cfd2511ff0e8432b138b1881e258bb1c9d1f2a528e446223033a593f17df8: Status 404 returned error can't find the container with id ba7cfd2511ff0e8432b138b1881e258bb1c9d1f2a528e446223033a593f17df8 Dec 10 15:38:37 crc kubenswrapper[4669]: I1210 15:38:37.138309 4669 generic.go:334] "Generic (PLEG): container finished" podID="8bca29e3-7697-4d29-b7f7-36d0fb0af5da" containerID="aa318fe21074273c1630ed760013878b5b8ee5954febd93902245f07a80b3728" exitCode=0 Dec 10 15:38:37 crc kubenswrapper[4669]: I1210 15:38:37.138520 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-6gfbf" event={"ID":"8bca29e3-7697-4d29-b7f7-36d0fb0af5da","Type":"ContainerDied","Data":"aa318fe21074273c1630ed760013878b5b8ee5954febd93902245f07a80b3728"} Dec 10 15:38:37 crc kubenswrapper[4669]: I1210 15:38:37.138691 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-6gfbf" event={"ID":"8bca29e3-7697-4d29-b7f7-36d0fb0af5da","Type":"ContainerStarted","Data":"ba7cfd2511ff0e8432b138b1881e258bb1c9d1f2a528e446223033a593f17df8"} Dec 10 15:38:37 crc kubenswrapper[4669]: I1210 15:38:37.149496 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-jj4m7" event={"ID":"0071bd3b-2920-4ba4-bf6e-8d1cacac2591","Type":"ContainerStarted","Data":"c37bd5aa7e9c21d27ec9dc5f4911c5cfc44adaac77bc8321302741a037bf2d08"} Dec 10 15:38:37 crc kubenswrapper[4669]: I1210 15:38:37.149541 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-jj4m7" event={"ID":"0071bd3b-2920-4ba4-bf6e-8d1cacac2591","Type":"ContainerStarted","Data":"291e1b7d42c44f8bcfba01c93e61c749c61236e51761d01ccf2e96a2a2a7e784"} Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.243165 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-jj4m7" podStartSLOduration=3.243142615 podStartE2EDuration="3.243142615s" podCreationTimestamp="2025-12-10 15:38:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:38:37.186544201 +0000 UTC m=+1091.103490828" watchObservedRunningTime="2025-12-10 15:38:38.243142615 +0000 UTC m=+1092.160089242" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.245547 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xlxc9"] Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.450575 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.480793 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-8cqw2"] Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.532845 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.586731 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-sb\") pod \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.586817 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-nb\") pod \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.586880 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-dns-svc\") pod \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.586954 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfcsh\" (UniqueName: \"kubernetes.io/projected/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-kube-api-access-wfcsh\") pod \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.586988 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-config\") pod \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\" (UID: \"8bca29e3-7697-4d29-b7f7-36d0fb0af5da\") " Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.595785 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.633356 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-kube-api-access-wfcsh" (OuterVolumeSpecName: "kube-api-access-wfcsh") pod "8bca29e3-7697-4d29-b7f7-36d0fb0af5da" (UID: "8bca29e3-7697-4d29-b7f7-36d0fb0af5da"). InnerVolumeSpecName "kube-api-access-wfcsh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.638412 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8bca29e3-7697-4d29-b7f7-36d0fb0af5da" (UID: "8bca29e3-7697-4d29-b7f7-36d0fb0af5da"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.640553 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-j8blj"] Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.661072 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8bca29e3-7697-4d29-b7f7-36d0fb0af5da" (UID: "8bca29e3-7697-4d29-b7f7-36d0fb0af5da"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.664453 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-hqkqq"] Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.686043 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-config" (OuterVolumeSpecName: "config") pod "8bca29e3-7697-4d29-b7f7-36d0fb0af5da" (UID: "8bca29e3-7697-4d29-b7f7-36d0fb0af5da"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.689265 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.689295 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfcsh\" (UniqueName: \"kubernetes.io/projected/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-kube-api-access-wfcsh\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.689304 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.689313 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.693668 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-7w6wd"] Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.738841 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8bca29e3-7697-4d29-b7f7-36d0fb0af5da" (UID: "8bca29e3-7697-4d29-b7f7-36d0fb0af5da"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:38 crc kubenswrapper[4669]: I1210 15:38:38.819939 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8bca29e3-7697-4d29-b7f7-36d0fb0af5da-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.170019 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-67795cd9-6gfbf" event={"ID":"8bca29e3-7697-4d29-b7f7-36d0fb0af5da","Type":"ContainerDied","Data":"ba7cfd2511ff0e8432b138b1881e258bb1c9d1f2a528e446223033a593f17df8"} Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.170073 4669 scope.go:117] "RemoveContainer" containerID="aa318fe21074273c1630ed760013878b5b8ee5954febd93902245f07a80b3728" Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.170125 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-67795cd9-6gfbf" Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.171584 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8cqw2" event={"ID":"20e32d2e-0738-45f0-bb91-b8e48694928b","Type":"ContainerStarted","Data":"5bdd49e8ec4f732bd868c88d4dfdd87772c9754a55674884cda7f420087dde56"} Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.172890 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xlxc9" event={"ID":"f76ac545-01b9-49f4-a709-7448d8398622","Type":"ContainerStarted","Data":"fa8a7edd13630b037e953e6f5c3a595f97923fdcb0d97d297f4d80184d8c457a"} Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.172933 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xlxc9" event={"ID":"f76ac545-01b9-49f4-a709-7448d8398622","Type":"ContainerStarted","Data":"2fad3ec92693176329923349532b9e0b87289851b0448bd9abba8ad057a84036"} Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.174005 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7w6wd" event={"ID":"c69a866b-0a6c-446d-aeea-24a9a6e95efa","Type":"ContainerStarted","Data":"54cf2e6c478884ba9bff7b06e5d8dbe56a9310e5d426eb369eccce027fe0bc89"} Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.178570 4669 generic.go:334] "Generic (PLEG): container finished" podID="cb2db0e0-3f9e-4683-96a8-5bab641a60f4" containerID="69de436f9822198db669ae09fc8150e9f34f1acf1984feba67308d2b86223f8f" exitCode=0 Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.178610 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" event={"ID":"cb2db0e0-3f9e-4683-96a8-5bab641a60f4","Type":"ContainerDied","Data":"69de436f9822198db669ae09fc8150e9f34f1acf1984feba67308d2b86223f8f"} Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.178627 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" event={"ID":"cb2db0e0-3f9e-4683-96a8-5bab641a60f4","Type":"ContainerStarted","Data":"3500797c8a4de154bee62f9b4cc785e8af2471d0470ab8851fd1c27a57e44c4b"} Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.180960 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerStarted","Data":"11c764e6ea5b25c74eb6847e7250c0cd84d1471e3a8beab5c0bc9251c0762c3e"} Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.181856 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-db-sync-hqkqq" event={"ID":"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f","Type":"ContainerStarted","Data":"746a544b15faad78389a0e3a4dc9db9603d9488d437bcb78c18e80afd4c7b485"} Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.196343 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-xlxc9" podStartSLOduration=4.196326756 podStartE2EDuration="4.196326756s" podCreationTimestamp="2025-12-10 15:38:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:38:39.193807753 +0000 UTC m=+1093.110754370" watchObservedRunningTime="2025-12-10 15:38:39.196326756 +0000 UTC m=+1093.113273383" Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.253523 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-6gfbf"] Dec 10 15:38:39 crc kubenswrapper[4669]: I1210 15:38:39.259638 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-67795cd9-6gfbf"] Dec 10 15:38:40 crc kubenswrapper[4669]: I1210 15:38:40.198764 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" event={"ID":"cb2db0e0-3f9e-4683-96a8-5bab641a60f4","Type":"ContainerStarted","Data":"387a03aa30f3403967139fa70928a428b65fe39ef89c3a5f4c38e377d6b75758"} Dec 10 15:38:40 crc kubenswrapper[4669]: I1210 15:38:40.200275 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:40 crc kubenswrapper[4669]: I1210 15:38:40.223750 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" podStartSLOduration=5.223732419 podStartE2EDuration="5.223732419s" podCreationTimestamp="2025-12-10 15:38:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:38:40.221511214 +0000 UTC m=+1094.138457841" watchObservedRunningTime="2025-12-10 15:38:40.223732419 +0000 UTC m=+1094.140679046" Dec 10 15:38:40 crc kubenswrapper[4669]: I1210 15:38:40.408559 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bca29e3-7697-4d29-b7f7-36d0fb0af5da" path="/var/lib/kubelet/pods/8bca29e3-7697-4d29-b7f7-36d0fb0af5da/volumes" Dec 10 15:38:43 crc kubenswrapper[4669]: I1210 15:38:43.251322 4669 generic.go:334] "Generic (PLEG): container finished" podID="f76ac545-01b9-49f4-a709-7448d8398622" containerID="fa8a7edd13630b037e953e6f5c3a595f97923fdcb0d97d297f4d80184d8c457a" exitCode=0 Dec 10 15:38:43 crc kubenswrapper[4669]: I1210 15:38:43.251423 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xlxc9" event={"ID":"f76ac545-01b9-49f4-a709-7448d8398622","Type":"ContainerDied","Data":"fa8a7edd13630b037e953e6f5c3a595f97923fdcb0d97d297f4d80184d8c457a"} Dec 10 15:38:46 crc kubenswrapper[4669]: I1210 15:38:46.144066 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:38:46 crc kubenswrapper[4669]: I1210 15:38:46.209657 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-2xd8l"] Dec 10 15:38:46 crc kubenswrapper[4669]: I1210 15:38:46.209931 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" 
containerName="dnsmasq-dns" containerID="cri-o://c7192d4b933a04bd5ce8ccdeadfc0c93684d06579ec03708ccf394b6f7616d7c" gracePeriod=10 Dec 10 15:38:47 crc kubenswrapper[4669]: I1210 15:38:47.292540 4669 generic.go:334] "Generic (PLEG): container finished" podID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerID="c7192d4b933a04bd5ce8ccdeadfc0c93684d06579ec03708ccf394b6f7616d7c" exitCode=0 Dec 10 15:38:47 crc kubenswrapper[4669]: I1210 15:38:47.292608 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" event={"ID":"8c5beee8-d55f-4b9a-bf26-1207a5a6264f","Type":"ContainerDied","Data":"c7192d4b933a04bd5ce8ccdeadfc0c93684d06579ec03708ccf394b6f7616d7c"} Dec 10 15:38:47 crc kubenswrapper[4669]: I1210 15:38:47.552697 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.121:5353: connect: connection refused" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.062899 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.178370 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-scripts\") pod \"f76ac545-01b9-49f4-a709-7448d8398622\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.178429 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-credential-keys\") pod \"f76ac545-01b9-49f4-a709-7448d8398622\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.178473 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-fernet-keys\") pod \"f76ac545-01b9-49f4-a709-7448d8398622\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.178516 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-combined-ca-bundle\") pod \"f76ac545-01b9-49f4-a709-7448d8398622\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.178575 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7hdl\" (UniqueName: \"kubernetes.io/projected/f76ac545-01b9-49f4-a709-7448d8398622-kube-api-access-x7hdl\") pod \"f76ac545-01b9-49f4-a709-7448d8398622\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.180059 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-config-data\") pod \"f76ac545-01b9-49f4-a709-7448d8398622\" (UID: \"f76ac545-01b9-49f4-a709-7448d8398622\") " Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.184490 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-credential-keys" (OuterVolumeSpecName: 
"credential-keys") pod "f76ac545-01b9-49f4-a709-7448d8398622" (UID: "f76ac545-01b9-49f4-a709-7448d8398622"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.185635 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-scripts" (OuterVolumeSpecName: "scripts") pod "f76ac545-01b9-49f4-a709-7448d8398622" (UID: "f76ac545-01b9-49f4-a709-7448d8398622"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.185790 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "f76ac545-01b9-49f4-a709-7448d8398622" (UID: "f76ac545-01b9-49f4-a709-7448d8398622"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.193019 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f76ac545-01b9-49f4-a709-7448d8398622-kube-api-access-x7hdl" (OuterVolumeSpecName: "kube-api-access-x7hdl") pod "f76ac545-01b9-49f4-a709-7448d8398622" (UID: "f76ac545-01b9-49f4-a709-7448d8398622"). InnerVolumeSpecName "kube-api-access-x7hdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.219363 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f76ac545-01b9-49f4-a709-7448d8398622" (UID: "f76ac545-01b9-49f4-a709-7448d8398622"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.234620 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-config-data" (OuterVolumeSpecName: "config-data") pod "f76ac545-01b9-49f4-a709-7448d8398622" (UID: "f76ac545-01b9-49f4-a709-7448d8398622"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.284348 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.284382 4669 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.284394 4669 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.284405 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.284416 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7hdl\" (UniqueName: \"kubernetes.io/projected/f76ac545-01b9-49f4-a709-7448d8398622-kube-api-access-x7hdl\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.284426 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f76ac545-01b9-49f4-a709-7448d8398622-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.319774 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xlxc9" event={"ID":"f76ac545-01b9-49f4-a709-7448d8398622","Type":"ContainerDied","Data":"2fad3ec92693176329923349532b9e0b87289851b0448bd9abba8ad057a84036"} Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.319809 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fad3ec92693176329923349532b9e0b87289851b0448bd9abba8ad057a84036" Dec 10 15:38:50 crc kubenswrapper[4669]: I1210 15:38:50.319861 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-xlxc9" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.164264 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-xlxc9"] Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.171344 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-xlxc9"] Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.254947 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-2w6bt"] Dec 10 15:38:51 crc kubenswrapper[4669]: E1210 15:38:51.255431 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bca29e3-7697-4d29-b7f7-36d0fb0af5da" containerName="init" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.255457 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bca29e3-7697-4d29-b7f7-36d0fb0af5da" containerName="init" Dec 10 15:38:51 crc kubenswrapper[4669]: E1210 15:38:51.255493 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f76ac545-01b9-49f4-a709-7448d8398622" containerName="keystone-bootstrap" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.255503 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f76ac545-01b9-49f4-a709-7448d8398622" containerName="keystone-bootstrap" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.255679 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f76ac545-01b9-49f4-a709-7448d8398622" containerName="keystone-bootstrap" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.255701 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bca29e3-7697-4d29-b7f7-36d0fb0af5da" containerName="init" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.256364 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.260637 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.261017 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.261360 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.261702 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.261870 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9lkp5" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.268862 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2w6bt"] Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.301864 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-combined-ca-bundle\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.301914 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-scripts\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.301959 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96qm2\" (UniqueName: \"kubernetes.io/projected/56b87d2e-68a9-42d1-87c5-68d6010539ea-kube-api-access-96qm2\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.302188 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-fernet-keys\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.302263 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-credential-keys\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.302422 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-config-data\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.403683 4669 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-config-data\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.403750 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-combined-ca-bundle\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.403768 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-scripts\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.403793 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96qm2\" (UniqueName: \"kubernetes.io/projected/56b87d2e-68a9-42d1-87c5-68d6010539ea-kube-api-access-96qm2\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.403864 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-fernet-keys\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.403887 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-credential-keys\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.407908 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-combined-ca-bundle\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.409201 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-config-data\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.410731 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-credential-keys\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.410746 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-fernet-keys\") pod \"keystone-bootstrap-2w6bt\" (UID: 
\"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.420585 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-scripts\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.421314 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96qm2\" (UniqueName: \"kubernetes.io/projected/56b87d2e-68a9-42d1-87c5-68d6010539ea-kube-api-access-96qm2\") pod \"keystone-bootstrap-2w6bt\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") " pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:51 crc kubenswrapper[4669]: I1210 15:38:51.591187 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:38:52 crc kubenswrapper[4669]: I1210 15:38:52.410182 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f76ac545-01b9-49f4-a709-7448d8398622" path="/var/lib/kubelet/pods/f76ac545-01b9-49f4-a709-7448d8398622/volumes" Dec 10 15:38:57 crc kubenswrapper[4669]: I1210 15:38:57.385454 4669 generic.go:334] "Generic (PLEG): container finished" podID="0071bd3b-2920-4ba4-bf6e-8d1cacac2591" containerID="c37bd5aa7e9c21d27ec9dc5f4911c5cfc44adaac77bc8321302741a037bf2d08" exitCode=0 Dec 10 15:38:57 crc kubenswrapper[4669]: I1210 15:38:57.385536 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-jj4m7" event={"ID":"0071bd3b-2920-4ba4-bf6e-8d1cacac2591","Type":"ContainerDied","Data":"c37bd5aa7e9c21d27ec9dc5f4911c5cfc44adaac77bc8321302741a037bf2d08"} Dec 10 15:38:57 crc kubenswrapper[4669]: I1210 15:38:57.553909 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.121:5353: i/o timeout" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.047962 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.147017 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-dns-svc\") pod \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.147082 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-config\") pod \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.147133 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-nb\") pod \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.147156 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68lsm\" (UniqueName: \"kubernetes.io/projected/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-kube-api-access-68lsm\") pod \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.147227 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-sb\") pod \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\" (UID: \"8c5beee8-d55f-4b9a-bf26-1207a5a6264f\") " Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.153355 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-kube-api-access-68lsm" (OuterVolumeSpecName: "kube-api-access-68lsm") pod "8c5beee8-d55f-4b9a-bf26-1207a5a6264f" (UID: "8c5beee8-d55f-4b9a-bf26-1207a5a6264f"). InnerVolumeSpecName "kube-api-access-68lsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.197129 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8c5beee8-d55f-4b9a-bf26-1207a5a6264f" (UID: "8c5beee8-d55f-4b9a-bf26-1207a5a6264f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.201045 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8c5beee8-d55f-4b9a-bf26-1207a5a6264f" (UID: "8c5beee8-d55f-4b9a-bf26-1207a5a6264f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.209979 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-config" (OuterVolumeSpecName: "config") pod "8c5beee8-d55f-4b9a-bf26-1207a5a6264f" (UID: "8c5beee8-d55f-4b9a-bf26-1207a5a6264f"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.218737 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8c5beee8-d55f-4b9a-bf26-1207a5a6264f" (UID: "8c5beee8-d55f-4b9a-bf26-1207a5a6264f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.249122 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.249168 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.249182 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.249194 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.249238 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68lsm\" (UniqueName: \"kubernetes.io/projected/8c5beee8-d55f-4b9a-bf26-1207a5a6264f-kube-api-access-68lsm\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.396277 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.397892 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" event={"ID":"8c5beee8-d55f-4b9a-bf26-1207a5a6264f","Type":"ContainerDied","Data":"1e54eae227f0d85da5cc32a6bd4d6608bcb9e9dc491f34004b0b2eb4f811ea85"} Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.397966 4669 scope.go:117] "RemoveContainer" containerID="c7192d4b933a04bd5ce8ccdeadfc0c93684d06579ec03708ccf394b6f7616d7c" Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.440624 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-2xd8l"] Dec 10 15:38:58 crc kubenswrapper[4669]: I1210 15:38:58.453950 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-554567b4f7-2xd8l"] Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.258652 4669 scope.go:117] "RemoveContainer" containerID="51fc5d636518913c3a6205c5a1fa9b8f2a00c1d19de43a6f85c4d275140c823d" Dec 10 15:38:59 crc kubenswrapper[4669]: E1210 15:38:59.264152 4669 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 10 15:38:59 crc kubenswrapper[4669]: E1210 15:38:59.264429 4669 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xm44b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGr
oup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-8cqw2_openstack(20e32d2e-0738-45f0-bb91-b8e48694928b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 10 15:38:59 crc kubenswrapper[4669]: E1210 15:38:59.265639 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-8cqw2" podUID="20e32d2e-0738-45f0-bb91-b8e48694928b" Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.417924 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-jj4m7" event={"ID":"0071bd3b-2920-4ba4-bf6e-8d1cacac2591","Type":"ContainerDied","Data":"291e1b7d42c44f8bcfba01c93e61c749c61236e51761d01ccf2e96a2a2a7e784"} Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.418030 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="291e1b7d42c44f8bcfba01c93e61c749c61236e51761d01ccf2e96a2a2a7e784" Dec 10 15:38:59 crc kubenswrapper[4669]: E1210 15:38:59.433106 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-8cqw2" podUID="20e32d2e-0738-45f0-bb91-b8e48694928b" Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.550993 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.572581 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-config\") pod \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.572724 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5dll\" (UniqueName: \"kubernetes.io/projected/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-kube-api-access-c5dll\") pod \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.572834 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-combined-ca-bundle\") pod \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\" (UID: \"0071bd3b-2920-4ba4-bf6e-8d1cacac2591\") " Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.577974 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-kube-api-access-c5dll" (OuterVolumeSpecName: "kube-api-access-c5dll") pod "0071bd3b-2920-4ba4-bf6e-8d1cacac2591" (UID: "0071bd3b-2920-4ba4-bf6e-8d1cacac2591"). InnerVolumeSpecName "kube-api-access-c5dll". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.608986 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0071bd3b-2920-4ba4-bf6e-8d1cacac2591" (UID: "0071bd3b-2920-4ba4-bf6e-8d1cacac2591"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.620277 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-config" (OuterVolumeSpecName: "config") pod "0071bd3b-2920-4ba4-bf6e-8d1cacac2591" (UID: "0071bd3b-2920-4ba4-bf6e-8d1cacac2591"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.677176 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.677272 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.677287 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5dll\" (UniqueName: \"kubernetes.io/projected/0071bd3b-2920-4ba4-bf6e-8d1cacac2591-kube-api-access-c5dll\") on node \"crc\" DevicePath \"\"" Dec 10 15:38:59 crc kubenswrapper[4669]: I1210 15:38:59.692125 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-2w6bt"] Dec 10 15:38:59 crc kubenswrapper[4669]: W1210 15:38:59.700975 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod56b87d2e_68a9_42d1_87c5_68d6010539ea.slice/crio-d45108704e98898f96152a2083d50cbaab63798f398c9d0c1cf89c545ca68583 WatchSource:0}: Error finding container d45108704e98898f96152a2083d50cbaab63798f398c9d0c1cf89c545ca68583: Status 404 returned error can't find the container with id d45108704e98898f96152a2083d50cbaab63798f398c9d0c1cf89c545ca68583 Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.406074 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" path="/var/lib/kubelet/pods/8c5beee8-d55f-4b9a-bf26-1207a5a6264f/volumes" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.428701 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerStarted","Data":"02d94dba0bb873fa665594bca91a6fb63cff5588f54aa9eb9a55dce1eefaf15f"} Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.430415 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hqkqq" event={"ID":"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f","Type":"ContainerStarted","Data":"393da6634b1d6ea86e46c5f6cbe177464e847be19f2ca3640cf1cfe541d97280"} Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.435074 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2w6bt" 
event={"ID":"56b87d2e-68a9-42d1-87c5-68d6010539ea","Type":"ContainerStarted","Data":"0fa4f99a89b4fbe2866d4865e468969e0bd0d431267a1e8e8ed384e991f7ef9d"} Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.435108 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2w6bt" event={"ID":"56b87d2e-68a9-42d1-87c5-68d6010539ea","Type":"ContainerStarted","Data":"d45108704e98898f96152a2083d50cbaab63798f398c9d0c1cf89c545ca68583"} Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.436936 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-jj4m7" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.437482 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7w6wd" event={"ID":"c69a866b-0a6c-446d-aeea-24a9a6e95efa","Type":"ContainerStarted","Data":"087f49333541075c99219f12683e02fd8739949b2ef01ad6b8f3afedd9a804f5"} Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.452331 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-hqkqq" podStartSLOduration=5.09742236 podStartE2EDuration="25.452315565s" podCreationTimestamp="2025-12-10 15:38:35 +0000 UTC" firstStartedPulling="2025-12-10 15:38:38.848963297 +0000 UTC m=+1092.765909924" lastFinishedPulling="2025-12-10 15:38:59.203856502 +0000 UTC m=+1113.120803129" observedRunningTime="2025-12-10 15:39:00.451165867 +0000 UTC m=+1114.368112494" watchObservedRunningTime="2025-12-10 15:39:00.452315565 +0000 UTC m=+1114.369262192" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.477997 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-7w6wd" podStartSLOduration=5.097514984 podStartE2EDuration="25.477979992s" podCreationTimestamp="2025-12-10 15:38:35 +0000 UTC" firstStartedPulling="2025-12-10 15:38:38.849378077 +0000 UTC m=+1092.766324704" lastFinishedPulling="2025-12-10 15:38:59.229843085 +0000 UTC m=+1113.146789712" observedRunningTime="2025-12-10 15:39:00.474623419 +0000 UTC m=+1114.391570056" watchObservedRunningTime="2025-12-10 15:39:00.477979992 +0000 UTC m=+1114.394926629" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.495538 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-2w6bt" podStartSLOduration=9.495519989 podStartE2EDuration="9.495519989s" podCreationTimestamp="2025-12-10 15:38:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:00.495478298 +0000 UTC m=+1114.412424945" watchObservedRunningTime="2025-12-10 15:39:00.495519989 +0000 UTC m=+1114.412466616" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.826131 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-xz9tf"] Dec 10 15:39:00 crc kubenswrapper[4669]: E1210 15:39:00.826480 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0071bd3b-2920-4ba4-bf6e-8d1cacac2591" containerName="neutron-db-sync" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.826497 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="0071bd3b-2920-4ba4-bf6e-8d1cacac2591" containerName="neutron-db-sync" Dec 10 15:39:00 crc kubenswrapper[4669]: E1210 15:39:00.826526 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerName="init" Dec 10 15:39:00 crc 
kubenswrapper[4669]: I1210 15:39:00.826533 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerName="init" Dec 10 15:39:00 crc kubenswrapper[4669]: E1210 15:39:00.826542 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerName="dnsmasq-dns" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.826548 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerName="dnsmasq-dns" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.826693 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerName="dnsmasq-dns" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.826712 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="0071bd3b-2920-4ba4-bf6e-8d1cacac2591" containerName="neutron-db-sync" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.827580 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.852030 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-xz9tf"] Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.912500 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.912557 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.912667 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-config\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.912687 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m254f\" (UniqueName: \"kubernetes.io/projected/5328170f-a268-4c18-9012-2b99ec73bbf1-kube-api-access-m254f\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:00 crc kubenswrapper[4669]: I1210 15:39:00.912724 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.019294 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.019401 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.019442 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.019581 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-config\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.019608 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m254f\" (UniqueName: \"kubernetes.io/projected/5328170f-a268-4c18-9012-2b99ec73bbf1-kube-api-access-m254f\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.020593 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-nb\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.020923 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-sb\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.021344 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-config\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.021354 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-dns-svc\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.043558 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m254f\" (UniqueName: \"kubernetes.io/projected/5328170f-a268-4c18-9012-2b99ec73bbf1-kube-api-access-m254f\") pod \"dnsmasq-dns-5f66db59b9-xz9tf\" 
(UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.109742 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7bdbfc58b4-xzzxl"] Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.110969 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.113563 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.113791 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.113995 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-rvbcm" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.121629 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.146072 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7bdbfc58b4-xzzxl"] Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.178721 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.223022 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-ovndb-tls-certs\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.223108 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-config\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.223131 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-combined-ca-bundle\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.223153 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pz44b\" (UniqueName: \"kubernetes.io/projected/986eb035-7a6f-4395-9baa-fd984f4bb232-kube-api-access-pz44b\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.223179 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-httpd-config\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.324878 4669 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-config\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.325154 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pz44b\" (UniqueName: \"kubernetes.io/projected/986eb035-7a6f-4395-9baa-fd984f4bb232-kube-api-access-pz44b\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.325584 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-combined-ca-bundle\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.326092 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-httpd-config\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.326347 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-ovndb-tls-certs\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.328710 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-config\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.330130 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-ovndb-tls-certs\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.330238 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-combined-ca-bundle\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.338805 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-httpd-config\") pod \"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.361877 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pz44b\" (UniqueName: \"kubernetes.io/projected/986eb035-7a6f-4395-9baa-fd984f4bb232-kube-api-access-pz44b\") pod 
\"neutron-7bdbfc58b4-xzzxl\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:01 crc kubenswrapper[4669]: I1210 15:39:01.447870 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:02 crc kubenswrapper[4669]: I1210 15:39:02.554905 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-554567b4f7-2xd8l" podUID="8c5beee8-d55f-4b9a-bf26-1207a5a6264f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.121:5353: i/o timeout" Dec 10 15:39:03 crc kubenswrapper[4669]: W1210 15:39:03.698618 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5328170f_a268_4c18_9012_2b99ec73bbf1.slice/crio-ecf5630772e691a59eb49f74410e74ab00a570059806f1407e49a72bf544fbc5 WatchSource:0}: Error finding container ecf5630772e691a59eb49f74410e74ab00a570059806f1407e49a72bf544fbc5: Status 404 returned error can't find the container with id ecf5630772e691a59eb49f74410e74ab00a570059806f1407e49a72bf544fbc5 Dec 10 15:39:03 crc kubenswrapper[4669]: I1210 15:39:03.698838 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-xz9tf"] Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.009345 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7bdbfc58b4-xzzxl"] Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.522234 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerStarted","Data":"ee9e17c9f85d4e6a1489ac6c3f0c07944b27c4b5069df6f27d3f4355083ec4da"} Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.524013 4669 generic.go:334] "Generic (PLEG): container finished" podID="5328170f-a268-4c18-9012-2b99ec73bbf1" containerID="e7d57de32893d4ecce08d0ac516362edf5af917acefe998111c61e6ea428b698" exitCode=0 Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.524068 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" event={"ID":"5328170f-a268-4c18-9012-2b99ec73bbf1","Type":"ContainerDied","Data":"e7d57de32893d4ecce08d0ac516362edf5af917acefe998111c61e6ea428b698"} Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.524091 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" event={"ID":"5328170f-a268-4c18-9012-2b99ec73bbf1","Type":"ContainerStarted","Data":"ecf5630772e691a59eb49f74410e74ab00a570059806f1407e49a72bf544fbc5"} Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.531872 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7bdbfc58b4-xzzxl" event={"ID":"986eb035-7a6f-4395-9baa-fd984f4bb232","Type":"ContainerStarted","Data":"be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259"} Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.531908 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7bdbfc58b4-xzzxl" event={"ID":"986eb035-7a6f-4395-9baa-fd984f4bb232","Type":"ContainerStarted","Data":"38cce0ddf1123a9abca7fc3cf3b0bd4effeb625b4f31c094527467463449cdb7"} Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.727541 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-6d8556c6c7-9clqn"] Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.730937 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.734942 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.735124 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.786140 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d8556c6c7-9clqn"] Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.826758 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-combined-ca-bundle\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.826821 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-config\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.826860 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-public-tls-certs\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.826891 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-internal-tls-certs\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.826915 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fpbrr\" (UniqueName: \"kubernetes.io/projected/46bed994-2127-48c1-9776-b303e6ea6bc7-kube-api-access-fpbrr\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.826948 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-httpd-config\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.826992 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-ovndb-tls-certs\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.927913 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-public-tls-certs\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.927971 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-internal-tls-certs\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.927991 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fpbrr\" (UniqueName: \"kubernetes.io/projected/46bed994-2127-48c1-9776-b303e6ea6bc7-kube-api-access-fpbrr\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.928023 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-httpd-config\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.928066 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-ovndb-tls-certs\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.928125 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-combined-ca-bundle\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.928155 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-config\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.938309 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-config\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.938945 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-httpd-config\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.943071 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-ovndb-tls-certs\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " 
pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.943159 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-internal-tls-certs\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.946075 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-combined-ca-bundle\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.947242 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/46bed994-2127-48c1-9776-b303e6ea6bc7-public-tls-certs\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:04 crc kubenswrapper[4669]: I1210 15:39:04.952494 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fpbrr\" (UniqueName: \"kubernetes.io/projected/46bed994-2127-48c1-9776-b303e6ea6bc7-kube-api-access-fpbrr\") pod \"neutron-6d8556c6c7-9clqn\" (UID: \"46bed994-2127-48c1-9776-b303e6ea6bc7\") " pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:05 crc kubenswrapper[4669]: I1210 15:39:05.113192 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:05 crc kubenswrapper[4669]: I1210 15:39:05.565480 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" event={"ID":"5328170f-a268-4c18-9012-2b99ec73bbf1","Type":"ContainerStarted","Data":"84ec240b32c770e55ae43e4010374b2f77bcbfe82efad9e5d24fe96404ae9d27"} Dec 10 15:39:05 crc kubenswrapper[4669]: I1210 15:39:05.566686 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:05 crc kubenswrapper[4669]: I1210 15:39:05.575118 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7bdbfc58b4-xzzxl" event={"ID":"986eb035-7a6f-4395-9baa-fd984f4bb232","Type":"ContainerStarted","Data":"e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e"} Dec 10 15:39:05 crc kubenswrapper[4669]: I1210 15:39:05.575771 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:05 crc kubenswrapper[4669]: I1210 15:39:05.613741 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7bdbfc58b4-xzzxl" podStartSLOduration=4.61372051 podStartE2EDuration="4.61372051s" podCreationTimestamp="2025-12-10 15:39:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:05.610090981 +0000 UTC m=+1119.527037608" watchObservedRunningTime="2025-12-10 15:39:05.61372051 +0000 UTC m=+1119.530667137" Dec 10 15:39:05 crc kubenswrapper[4669]: I1210 15:39:05.616290 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" podStartSLOduration=5.616259431 podStartE2EDuration="5.616259431s" 
podCreationTimestamp="2025-12-10 15:39:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:05.592507602 +0000 UTC m=+1119.509454229" watchObservedRunningTime="2025-12-10 15:39:05.616259431 +0000 UTC m=+1119.533206068" Dec 10 15:39:05 crc kubenswrapper[4669]: I1210 15:39:05.796131 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-6d8556c6c7-9clqn"] Dec 10 15:39:05 crc kubenswrapper[4669]: W1210 15:39:05.806934 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46bed994_2127_48c1_9776_b303e6ea6bc7.slice/crio-4eedc05e7a15b4de901f625154e811f677338f54a0d7a8e7b3d2b5ec7d16c560 WatchSource:0}: Error finding container 4eedc05e7a15b4de901f625154e811f677338f54a0d7a8e7b3d2b5ec7d16c560: Status 404 returned error can't find the container with id 4eedc05e7a15b4de901f625154e811f677338f54a0d7a8e7b3d2b5ec7d16c560 Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.585991 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d8556c6c7-9clqn" event={"ID":"46bed994-2127-48c1-9776-b303e6ea6bc7","Type":"ContainerStarted","Data":"803b7e987221522d9e0918af198106337752d27b90f8182171388bdb9c9b074b"} Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.586338 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d8556c6c7-9clqn" event={"ID":"46bed994-2127-48c1-9776-b303e6ea6bc7","Type":"ContainerStarted","Data":"e21192364a3b0f8f5e6bf2e804ee7e69e5fe08d78d5f4737ac9f9e5130afac94"} Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.586351 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-6d8556c6c7-9clqn" event={"ID":"46bed994-2127-48c1-9776-b303e6ea6bc7","Type":"ContainerStarted","Data":"4eedc05e7a15b4de901f625154e811f677338f54a0d7a8e7b3d2b5ec7d16c560"} Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.586536 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.590650 4669 generic.go:334] "Generic (PLEG): container finished" podID="56b87d2e-68a9-42d1-87c5-68d6010539ea" containerID="0fa4f99a89b4fbe2866d4865e468969e0bd0d431267a1e8e8ed384e991f7ef9d" exitCode=0 Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.590704 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2w6bt" event={"ID":"56b87d2e-68a9-42d1-87c5-68d6010539ea","Type":"ContainerDied","Data":"0fa4f99a89b4fbe2866d4865e468969e0bd0d431267a1e8e8ed384e991f7ef9d"} Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.592575 4669 generic.go:334] "Generic (PLEG): container finished" podID="c69a866b-0a6c-446d-aeea-24a9a6e95efa" containerID="087f49333541075c99219f12683e02fd8739949b2ef01ad6b8f3afedd9a804f5" exitCode=0 Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.592634 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7w6wd" event={"ID":"c69a866b-0a6c-446d-aeea-24a9a6e95efa","Type":"ContainerDied","Data":"087f49333541075c99219f12683e02fd8739949b2ef01ad6b8f3afedd9a804f5"} Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.594520 4669 generic.go:334] "Generic (PLEG): container finished" podID="c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" containerID="393da6634b1d6ea86e46c5f6cbe177464e847be19f2ca3640cf1cfe541d97280" exitCode=0 Dec 10 15:39:06 crc kubenswrapper[4669]: 
I1210 15:39:06.595239 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hqkqq" event={"ID":"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f","Type":"ContainerDied","Data":"393da6634b1d6ea86e46c5f6cbe177464e847be19f2ca3640cf1cfe541d97280"} Dec 10 15:39:06 crc kubenswrapper[4669]: I1210 15:39:06.611318 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-6d8556c6c7-9clqn" podStartSLOduration=2.611301166 podStartE2EDuration="2.611301166s" podCreationTimestamp="2025-12-10 15:39:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:06.607129865 +0000 UTC m=+1120.524076492" watchObservedRunningTime="2025-12-10 15:39:06.611301166 +0000 UTC m=+1120.528247793" Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.631512 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-2w6bt" event={"ID":"56b87d2e-68a9-42d1-87c5-68d6010539ea","Type":"ContainerDied","Data":"d45108704e98898f96152a2083d50cbaab63798f398c9d0c1cf89c545ca68583"} Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.632951 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d45108704e98898f96152a2083d50cbaab63798f398c9d0c1cf89c545ca68583" Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.635294 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-7w6wd" event={"ID":"c69a866b-0a6c-446d-aeea-24a9a6e95efa","Type":"ContainerDied","Data":"54cf2e6c478884ba9bff7b06e5d8dbe56a9310e5d426eb369eccce027fe0bc89"} Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.635384 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="54cf2e6c478884ba9bff7b06e5d8dbe56a9310e5d426eb369eccce027fe0bc89" Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.637582 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-hqkqq" event={"ID":"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f","Type":"ContainerDied","Data":"746a544b15faad78389a0e3a4dc9db9603d9488d437bcb78c18e80afd4c7b485"} Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.637668 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="746a544b15faad78389a0e3a4dc9db9603d9488d437bcb78c18e80afd4c7b485" Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.638151 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2w6bt" Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.663550 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7w6wd" Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.669647 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-hqkqq"
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757304 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ktbrr\" (UniqueName: \"kubernetes.io/projected/c69a866b-0a6c-446d-aeea-24a9a6e95efa-kube-api-access-ktbrr\") pod \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757343 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cblx8\" (UniqueName: \"kubernetes.io/projected/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-kube-api-access-cblx8\") pod \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757396 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-scripts\") pod \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757422 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-db-sync-config-data\") pod \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757469 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96qm2\" (UniqueName: \"kubernetes.io/projected/56b87d2e-68a9-42d1-87c5-68d6010539ea-kube-api-access-96qm2\") pod \"56b87d2e-68a9-42d1-87c5-68d6010539ea\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757497 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-combined-ca-bundle\") pod \"56b87d2e-68a9-42d1-87c5-68d6010539ea\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757532 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-logs\") pod \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757652 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-combined-ca-bundle\") pod \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757723 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-scripts\") pod \"56b87d2e-68a9-42d1-87c5-68d6010539ea\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757774 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-credential-keys\") pod \"56b87d2e-68a9-42d1-87c5-68d6010539ea\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757892 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-combined-ca-bundle\") pod \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\" (UID: \"c69a866b-0a6c-446d-aeea-24a9a6e95efa\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757935 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-config-data\") pod \"56b87d2e-68a9-42d1-87c5-68d6010539ea\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.757978 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-fernet-keys\") pod \"56b87d2e-68a9-42d1-87c5-68d6010539ea\" (UID: \"56b87d2e-68a9-42d1-87c5-68d6010539ea\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.758037 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-config-data\") pod \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\" (UID: \"c9641c8e-77a7-47c5-b7f4-16d6e7061c5f\") "
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.759485 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-logs" (OuterVolumeSpecName: "logs") pod "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" (UID: "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.766610 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56b87d2e-68a9-42d1-87c5-68d6010539ea-kube-api-access-96qm2" (OuterVolumeSpecName: "kube-api-access-96qm2") pod "56b87d2e-68a9-42d1-87c5-68d6010539ea" (UID: "56b87d2e-68a9-42d1-87c5-68d6010539ea"). InnerVolumeSpecName "kube-api-access-96qm2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.766869 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c69a866b-0a6c-446d-aeea-24a9a6e95efa" (UID: "c69a866b-0a6c-446d-aeea-24a9a6e95efa"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.770376 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c69a866b-0a6c-446d-aeea-24a9a6e95efa-kube-api-access-ktbrr" (OuterVolumeSpecName: "kube-api-access-ktbrr") pod "c69a866b-0a6c-446d-aeea-24a9a6e95efa" (UID: "c69a866b-0a6c-446d-aeea-24a9a6e95efa"). InnerVolumeSpecName "kube-api-access-ktbrr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.771383 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "56b87d2e-68a9-42d1-87c5-68d6010539ea" (UID: "56b87d2e-68a9-42d1-87c5-68d6010539ea"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.772629 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-kube-api-access-cblx8" (OuterVolumeSpecName: "kube-api-access-cblx8") pod "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" (UID: "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f"). InnerVolumeSpecName "kube-api-access-cblx8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.800699 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-scripts" (OuterVolumeSpecName: "scripts") pod "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" (UID: "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.801102 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-scripts" (OuterVolumeSpecName: "scripts") pod "56b87d2e-68a9-42d1-87c5-68d6010539ea" (UID: "56b87d2e-68a9-42d1-87c5-68d6010539ea"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.802358 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "56b87d2e-68a9-42d1-87c5-68d6010539ea" (UID: "56b87d2e-68a9-42d1-87c5-68d6010539ea"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.816741 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c69a866b-0a6c-446d-aeea-24a9a6e95efa" (UID: "c69a866b-0a6c-446d-aeea-24a9a6e95efa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.819253 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" (UID: "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.822442 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56b87d2e-68a9-42d1-87c5-68d6010539ea" (UID: "56b87d2e-68a9-42d1-87c5-68d6010539ea"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.823894 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-config-data" (OuterVolumeSpecName: "config-data") pod "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" (UID: "c9641c8e-77a7-47c5-b7f4-16d6e7061c5f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.824323 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-config-data" (OuterVolumeSpecName: "config-data") pod "56b87d2e-68a9-42d1-87c5-68d6010539ea" (UID: "56b87d2e-68a9-42d1-87c5-68d6010539ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.860564 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.860767 4669 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.860906 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96qm2\" (UniqueName: \"kubernetes.io/projected/56b87d2e-68a9-42d1-87c5-68d6010539ea-kube-api-access-96qm2\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.860964 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861012 4669 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-logs\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861067 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861160 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861262 4669 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-credential-keys\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861324 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c69a866b-0a6c-446d-aeea-24a9a6e95efa-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861374 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861443 4669 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/56b87d2e-68a9-42d1-87c5-68d6010539ea-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861525 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861630 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ktbrr\" (UniqueName: \"kubernetes.io/projected/c69a866b-0a6c-446d-aeea-24a9a6e95efa-kube-api-access-ktbrr\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:10 crc kubenswrapper[4669]: I1210 15:39:10.861688 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cblx8\" (UniqueName: \"kubernetes.io/projected/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f-kube-api-access-cblx8\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.181304 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.270726 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-j8blj"]
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.270970 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" podUID="cb2db0e0-3f9e-4683-96a8-5bab641a60f4" containerName="dnsmasq-dns" containerID="cri-o://387a03aa30f3403967139fa70928a428b65fe39ef89c3a5f4c38e377d6b75758" gracePeriod=10
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.671349 4669 generic.go:334] "Generic (PLEG): container finished" podID="cb2db0e0-3f9e-4683-96a8-5bab641a60f4" containerID="387a03aa30f3403967139fa70928a428b65fe39ef89c3a5f4c38e377d6b75758" exitCode=0
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.671619 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" event={"ID":"cb2db0e0-3f9e-4683-96a8-5bab641a60f4","Type":"ContainerDied","Data":"387a03aa30f3403967139fa70928a428b65fe39ef89c3a5f4c38e377d6b75758"}
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.692871 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-7w6wd"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.695945 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerStarted","Data":"f5ceb0ad15facabb73d1b75502e5e56dd776dac5e4d377c388824af406d313a8"}
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.696005 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-2w6bt"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.713469 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-hqkqq"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.908811 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-659884ff9-rxsts"]
Dec 10 15:39:11 crc kubenswrapper[4669]: E1210 15:39:11.909140 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56b87d2e-68a9-42d1-87c5-68d6010539ea" containerName="keystone-bootstrap"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.909157 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="56b87d2e-68a9-42d1-87c5-68d6010539ea" containerName="keystone-bootstrap"
Dec 10 15:39:11 crc kubenswrapper[4669]: E1210 15:39:11.909177 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" containerName="placement-db-sync"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.909184 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" containerName="placement-db-sync"
Dec 10 15:39:11 crc kubenswrapper[4669]: E1210 15:39:11.909197 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c69a866b-0a6c-446d-aeea-24a9a6e95efa" containerName="barbican-db-sync"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.909203 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="c69a866b-0a6c-446d-aeea-24a9a6e95efa" containerName="barbican-db-sync"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.910446 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" containerName="placement-db-sync"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.910480 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="56b87d2e-68a9-42d1-87c5-68d6010539ea" containerName="keystone-bootstrap"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.910498 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="c69a866b-0a6c-446d-aeea-24a9a6e95efa" containerName="barbican-db-sync"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.910710 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.911038 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.924020 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.924208 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.924414 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.924519 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-9lkp5"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.924612 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.924702 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc"
Dec 10 15:39:11 crc kubenswrapper[4669]: I1210 15:39:11.936679 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-659884ff9-rxsts"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.001935 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-config\") pod \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") "
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.002243 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rdnr\" (UniqueName: \"kubernetes.io/projected/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-kube-api-access-2rdnr\") pod \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") "
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.002432 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-sb\") pod \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") "
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.002539 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-dns-svc\") pod \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") "
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.002669 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-nb\") pod \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\" (UID: \"cb2db0e0-3f9e-4683-96a8-5bab641a60f4\") "
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.002958 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-scripts\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.003341 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdstd\" (UniqueName: \"kubernetes.io/projected/cb398931-6065-4b00-b312-b2fcfda385ab-kube-api-access-vdstd\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.003427 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-fernet-keys\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.003510 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-internal-tls-certs\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.003581 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-credential-keys\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.003642 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-config-data\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.003716 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-combined-ca-bundle\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.003798 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-public-tls-certs\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.038915 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-dc69454c7-zsnjz"]
Dec 10 15:39:12 crc kubenswrapper[4669]: E1210 15:39:12.039480 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb2db0e0-3f9e-4683-96a8-5bab641a60f4" containerName="init"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.039493 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb2db0e0-3f9e-4683-96a8-5bab641a60f4" containerName="init"
Dec 10 15:39:12 crc kubenswrapper[4669]: E1210 15:39:12.039513 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb2db0e0-3f9e-4683-96a8-5bab641a60f4" containerName="dnsmasq-dns"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.039519 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb2db0e0-3f9e-4683-96a8-5bab641a60f4" containerName="dnsmasq-dns"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.044145 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb2db0e0-3f9e-4683-96a8-5bab641a60f4" containerName="dnsmasq-dns"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.050300 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.070446 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-kube-api-access-2rdnr" (OuterVolumeSpecName: "kube-api-access-2rdnr") pod "cb2db0e0-3f9e-4683-96a8-5bab641a60f4" (UID: "cb2db0e0-3f9e-4683-96a8-5bab641a60f4"). InnerVolumeSpecName "kube-api-access-2rdnr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.073271 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.073588 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-59rnr"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.089886 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.106980 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-scripts\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107038 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt6wp\" (UniqueName: \"kubernetes.io/projected/87675b91-d647-453f-bb63-2d10ddb27991-kube-api-access-kt6wp\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107069 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdstd\" (UniqueName: \"kubernetes.io/projected/cb398931-6065-4b00-b312-b2fcfda385ab-kube-api-access-vdstd\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107096 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-fernet-keys\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107122 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87675b91-d647-453f-bb63-2d10ddb27991-config-data-custom\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107142 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87675b91-d647-453f-bb63-2d10ddb27991-logs\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107164 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-internal-tls-certs\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107189 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87675b91-d647-453f-bb63-2d10ddb27991-combined-ca-bundle\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107207 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-credential-keys\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107305 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-config-data\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107340 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-combined-ca-bundle\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107372 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87675b91-d647-453f-bb63-2d10ddb27991-config-data\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107406 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-public-tls-certs\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.107472 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rdnr\" (UniqueName: \"kubernetes.io/projected/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-kube-api-access-2rdnr\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.127730 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-d6f8b949d-tbds5"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.130011 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.130973 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-credential-keys\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.131410 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-scripts\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.144026 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.150848 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-6556f5d7cd-dgcb9"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.153970 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-config-data\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.161872 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-combined-ca-bundle\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.163401 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-fernet-keys\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.164784 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.168657 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-dc69454c7-zsnjz"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.172807 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-public-tls-certs\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.173567 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cb398931-6065-4b00-b312-b2fcfda385ab-internal-tls-certs\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.173846 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.174019 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.174125 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-q2jg7"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.174259 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.174401 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.187896 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d6f8b949d-tbds5"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208486 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-config-data\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208536 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-combined-ca-bundle\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208561 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd2z2\" (UniqueName: \"kubernetes.io/projected/7fe33769-9a18-405c-a7a8-e1fbcb719fff-kube-api-access-sd2z2\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208597 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87675b91-d647-453f-bb63-2d10ddb27991-config-data-custom\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208618 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87675b91-d647-453f-bb63-2d10ddb27991-logs\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208655 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-internal-tls-certs\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208673 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87675b91-d647-453f-bb63-2d10ddb27991-combined-ca-bundle\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208696 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a62c0c4-e96b-486b-8660-5a797598341b-config-data\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208710 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a62c0c4-e96b-486b-8660-5a797598341b-config-data-custom\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208727 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87675b91-d647-453f-bb63-2d10ddb27991-config-data\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208750 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-scripts\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208794 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a62c0c4-e96b-486b-8660-5a797598341b-combined-ca-bundle\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208810 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a62c0c4-e96b-486b-8660-5a797598341b-logs\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208841 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe33769-9a18-405c-a7a8-e1fbcb719fff-logs\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208862 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-public-tls-certs\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208911 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggblj\" (UniqueName: \"kubernetes.io/projected/8a62c0c4-e96b-486b-8660-5a797598341b-kube-api-access-ggblj\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.208944 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt6wp\" (UniqueName: \"kubernetes.io/projected/87675b91-d647-453f-bb63-2d10ddb27991-kube-api-access-kt6wp\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.213665 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/87675b91-d647-453f-bb63-2d10ddb27991-logs\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.227683 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdstd\" (UniqueName: \"kubernetes.io/projected/cb398931-6065-4b00-b312-b2fcfda385ab-kube-api-access-vdstd\") pod \"keystone-659884ff9-rxsts\" (UID: \"cb398931-6065-4b00-b312-b2fcfda385ab\") " pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.245410 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6556f5d7cd-dgcb9"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.249920 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/87675b91-d647-453f-bb63-2d10ddb27991-combined-ca-bundle\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.264183 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-659884ff9-rxsts"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.264508 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt6wp\" (UniqueName: \"kubernetes.io/projected/87675b91-d647-453f-bb63-2d10ddb27991-kube-api-access-kt6wp\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.270585 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/87675b91-d647-453f-bb63-2d10ddb27991-config-data-custom\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.303818 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cb2db0e0-3f9e-4683-96a8-5bab641a60f4" (UID: "cb2db0e0-3f9e-4683-96a8-5bab641a60f4"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.311038 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-internal-tls-certs\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.311104 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a62c0c4-e96b-486b-8660-5a797598341b-config-data\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.311128 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a62c0c4-e96b-486b-8660-5a797598341b-config-data-custom\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.311171 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-scripts\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.321121 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a62c0c4-e96b-486b-8660-5a797598341b-config-data\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.311192 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a62c0c4-e96b-486b-8660-5a797598341b-combined-ca-bundle\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323076 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a62c0c4-e96b-486b-8660-5a797598341b-logs\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323163 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe33769-9a18-405c-a7a8-e1fbcb719fff-logs\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323200 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-public-tls-certs\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323346 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggblj\" (UniqueName: \"kubernetes.io/projected/8a62c0c4-e96b-486b-8660-5a797598341b-kube-api-access-ggblj\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323451 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-config-data\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323499 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-combined-ca-bundle\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323530 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd2z2\" (UniqueName: \"kubernetes.io/projected/7fe33769-9a18-405c-a7a8-e1fbcb719fff-kube-api-access-sd2z2\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323638 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323669 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7fe33769-9a18-405c-a7a8-e1fbcb719fff-logs\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.323941 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a62c0c4-e96b-486b-8660-5a797598341b-logs\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.334703 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-internal-tls-certs\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.342634 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-scripts\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.342776 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8a62c0c4-e96b-486b-8660-5a797598341b-config-data-custom\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.352573 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd2z2\" (UniqueName: \"kubernetes.io/projected/7fe33769-9a18-405c-a7a8-e1fbcb719fff-kube-api-access-sd2z2\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.355583 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-869f779d85-ff92t"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.356835 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a62c0c4-e96b-486b-8660-5a797598341b-combined-ca-bundle\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.357318 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.358664 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggblj\" (UniqueName: \"kubernetes.io/projected/8a62c0c4-e96b-486b-8660-5a797598341b-kube-api-access-ggblj\") pod \"barbican-keystone-listener-d6f8b949d-tbds5\" (UID: \"8a62c0c4-e96b-486b-8660-5a797598341b\") " pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.359903 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/87675b91-d647-453f-bb63-2d10ddb27991-config-data\") pod \"barbican-worker-dc69454c7-zsnjz\" (UID: \"87675b91-d647-453f-bb63-2d10ddb27991\") " pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.370372 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-config-data\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.376941 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-combined-ca-bundle\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.383512 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cb2db0e0-3f9e-4683-96a8-5bab641a60f4" (UID: "cb2db0e0-3f9e-4683-96a8-5bab641a60f4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.383568 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-ff92t"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.392078 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7fe33769-9a18-405c-a7a8-e1fbcb719fff-public-tls-certs\") pod \"placement-6556f5d7cd-dgcb9\" (UID: \"7fe33769-9a18-405c-a7a8-e1fbcb719fff\") " pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.392833 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-config" (OuterVolumeSpecName: "config") pod "cb2db0e0-3f9e-4683-96a8-5bab641a60f4" (UID: "cb2db0e0-3f9e-4683-96a8-5bab641a60f4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.424761 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-dns-svc\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.424818 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bxw2\" (UniqueName: \"kubernetes.io/projected/1256f642-992a-4d14-b552-5b471de3a211-kube-api-access-7bxw2\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.424873 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-config\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.424900 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.424927 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.424972 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.424981 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-config\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.435410 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cb2db0e0-3f9e-4683-96a8-5bab641a60f4" (UID: "cb2db0e0-3f9e-4683-96a8-5bab641a60f4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.469843 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-dc69454c7-zsnjz"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.488182 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.498848 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-6556f5d7cd-dgcb9"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.526811 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-dns-svc\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.527187 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bxw2\" (UniqueName: \"kubernetes.io/projected/1256f642-992a-4d14-b552-5b471de3a211-kube-api-access-7bxw2\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.528538 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-dns-svc\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.528591 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-config\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.528623 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.528651 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.528772 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cb2db0e0-3f9e-4683-96a8-5bab641a60f4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.529316 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-sb\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.530341 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-nb\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.530844 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-config\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.579789 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bxw2\" (UniqueName: \"kubernetes.io/projected/1256f642-992a-4d14-b552-5b471de3a211-kube-api-access-7bxw2\") pod \"dnsmasq-dns-869f779d85-ff92t\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.592082 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5c87fd57c8-hv4dt"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.593879 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.598352 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.630787 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nmld\" (UniqueName: \"kubernetes.io/projected/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-kube-api-access-8nmld\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.630886 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.630921 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-logs\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.631003 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data-custom\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.631047 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-combined-ca-bundle\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.652993 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c87fd57c8-hv4dt"]
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.689639 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-ff92t"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.733118 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.733164 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-logs\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.733278 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data-custom\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.733312 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-combined-ca-bundle\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.733349 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nmld\" (UniqueName: \"kubernetes.io/projected/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-kube-api-access-8nmld\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.734603 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-logs\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.746002 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-combined-ca-bundle\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.749437 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.761741 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data-custom\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt"
Dec 10 15:39:12 crc
kubenswrapper[4669]: I1210 15:39:12.795264 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nmld\" (UniqueName: \"kubernetes.io/projected/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-kube-api-access-8nmld\") pod \"barbican-api-5c87fd57c8-hv4dt\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " pod="openstack/barbican-api-5c87fd57c8-hv4dt" Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.815832 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" event={"ID":"cb2db0e0-3f9e-4683-96a8-5bab641a60f4","Type":"ContainerDied","Data":"3500797c8a4de154bee62f9b4cc785e8af2471d0470ab8851fd1c27a57e44c4b"} Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.815882 4669 scope.go:117] "RemoveContainer" containerID="387a03aa30f3403967139fa70928a428b65fe39ef89c3a5f4c38e377d6b75758" Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.816024 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5b6dbdb6f5-j8blj" Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.829475 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8cqw2" event={"ID":"20e32d2e-0738-45f0-bb91-b8e48694928b","Type":"ContainerStarted","Data":"9d4ec21be6882f695adae6225c4e2409e150ae6980432998e5b3afaba137946f"} Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.856064 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-j8blj"] Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.900056 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-8cqw2" podStartSLOduration=5.242229628 podStartE2EDuration="37.900036801s" podCreationTimestamp="2025-12-10 15:38:35 +0000 UTC" firstStartedPulling="2025-12-10 15:38:38.482343922 +0000 UTC m=+1092.399290539" lastFinishedPulling="2025-12-10 15:39:11.140151085 +0000 UTC m=+1125.057097712" observedRunningTime="2025-12-10 15:39:12.875036621 +0000 UTC m=+1126.791983248" watchObservedRunningTime="2025-12-10 15:39:12.900036801 +0000 UTC m=+1126.816983428" Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.901085 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5b6dbdb6f5-j8blj"] Dec 10 15:39:12 crc kubenswrapper[4669]: I1210 15:39:12.928044 4669 scope.go:117] "RemoveContainer" containerID="69de436f9822198db669ae09fc8150e9f34f1acf1984feba67308d2b86223f8f" Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.059264 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-659884ff9-rxsts"] Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.060421 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5c87fd57c8-hv4dt" Dec 10 15:39:13 crc kubenswrapper[4669]: W1210 15:39:13.109368 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcb398931_6065_4b00_b312_b2fcfda385ab.slice/crio-0a6e1a50eac41ed074c771e199fb249089e9b250b6411888a840f601e637c1ca WatchSource:0}: Error finding container 0a6e1a50eac41ed074c771e199fb249089e9b250b6411888a840f601e637c1ca: Status 404 returned error can't find the container with id 0a6e1a50eac41ed074c771e199fb249089e9b250b6411888a840f601e637c1ca Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.370002 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-dc69454c7-zsnjz"] Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.499847 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-6556f5d7cd-dgcb9"] Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.703380 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d6f8b949d-tbds5"] Dec 10 15:39:13 crc kubenswrapper[4669]: W1210 15:39:13.710471 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a62c0c4_e96b_486b_8660_5a797598341b.slice/crio-a2b2fcd5610c1a3b8918efaeb7d8128681b1a0895f4e6b846bdc64fd619f3b24 WatchSource:0}: Error finding container a2b2fcd5610c1a3b8918efaeb7d8128681b1a0895f4e6b846bdc64fd619f3b24: Status 404 returned error can't find the container with id a2b2fcd5610c1a3b8918efaeb7d8128681b1a0895f4e6b846bdc64fd619f3b24 Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.739128 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-ff92t"] Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.858885 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-ff92t" event={"ID":"1256f642-992a-4d14-b552-5b471de3a211","Type":"ContainerStarted","Data":"38cf59f3eec2d765a60e079a2aee9c71ccf93b4b5ad1d79cdb6ebffa6dccffcb"} Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.862621 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6556f5d7cd-dgcb9" event={"ID":"7fe33769-9a18-405c-a7a8-e1fbcb719fff","Type":"ContainerStarted","Data":"a5598177e823365f69ff10b6bd59913df2c0fbaf47d2111354168b5c414812e5"} Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.864768 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-659884ff9-rxsts" event={"ID":"cb398931-6065-4b00-b312-b2fcfda385ab","Type":"ContainerStarted","Data":"043a39d989c013ba8208b53fddb933aed7033b65d1e0639b5782b290dd6ebd41"} Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.864794 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-659884ff9-rxsts" event={"ID":"cb398931-6065-4b00-b312-b2fcfda385ab","Type":"ContainerStarted","Data":"0a6e1a50eac41ed074c771e199fb249089e9b250b6411888a840f601e637c1ca"} Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.866396 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-659884ff9-rxsts" Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.871368 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5" 
event={"ID":"8a62c0c4-e96b-486b-8660-5a797598341b","Type":"ContainerStarted","Data":"a2b2fcd5610c1a3b8918efaeb7d8128681b1a0895f4e6b846bdc64fd619f3b24"} Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.891414 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-dc69454c7-zsnjz" event={"ID":"87675b91-d647-453f-bb63-2d10ddb27991","Type":"ContainerStarted","Data":"b69378abe730a9b72b7bfcff4160820fb1eb698685deff097e72f8a6afd26f63"} Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.896315 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-659884ff9-rxsts" podStartSLOduration=2.896296785 podStartE2EDuration="2.896296785s" podCreationTimestamp="2025-12-10 15:39:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:13.896257354 +0000 UTC m=+1127.813203981" watchObservedRunningTime="2025-12-10 15:39:13.896296785 +0000 UTC m=+1127.813243402" Dec 10 15:39:13 crc kubenswrapper[4669]: I1210 15:39:13.957055 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5c87fd57c8-hv4dt"] Dec 10 15:39:14 crc kubenswrapper[4669]: I1210 15:39:14.421614 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cb2db0e0-3f9e-4683-96a8-5bab641a60f4" path="/var/lib/kubelet/pods/cb2db0e0-3f9e-4683-96a8-5bab641a60f4/volumes" Dec 10 15:39:14 crc kubenswrapper[4669]: I1210 15:39:14.957772 4669 generic.go:334] "Generic (PLEG): container finished" podID="1256f642-992a-4d14-b552-5b471de3a211" containerID="31636140f90747493905ceaa0c90f5613bffbd7e283db7ab54f637944582aac7" exitCode=0 Dec 10 15:39:14 crc kubenswrapper[4669]: I1210 15:39:14.958083 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-ff92t" event={"ID":"1256f642-992a-4d14-b552-5b471de3a211","Type":"ContainerDied","Data":"31636140f90747493905ceaa0c90f5613bffbd7e283db7ab54f637944582aac7"} Dec 10 15:39:14 crc kubenswrapper[4669]: I1210 15:39:14.991465 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6556f5d7cd-dgcb9" event={"ID":"7fe33769-9a18-405c-a7a8-e1fbcb719fff","Type":"ContainerStarted","Data":"f4633d0076df11dd14eb35d0b39dc6ab7dc795ab2fe7de5b34030e23c95a7869"} Dec 10 15:39:14 crc kubenswrapper[4669]: I1210 15:39:14.991522 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-6556f5d7cd-dgcb9" event={"ID":"7fe33769-9a18-405c-a7a8-e1fbcb719fff","Type":"ContainerStarted","Data":"9fdad85594bd5c26f5711339a648dda094a16560b446b5f969a2ac65e96be5a6"} Dec 10 15:39:14 crc kubenswrapper[4669]: I1210 15:39:14.991826 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6556f5d7cd-dgcb9" Dec 10 15:39:14 crc kubenswrapper[4669]: I1210 15:39:14.991867 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-6556f5d7cd-dgcb9" Dec 10 15:39:15 crc kubenswrapper[4669]: I1210 15:39:15.014782 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c87fd57c8-hv4dt" event={"ID":"db53e274-36cd-48a1-b907-ba4ed4e5d7e7","Type":"ContainerStarted","Data":"153610108aceb7cf5653d3945d1e1b6277dce157b5b587f68baf54874374966e"} Dec 10 15:39:15 crc kubenswrapper[4669]: I1210 15:39:15.014838 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c87fd57c8-hv4dt" 
event={"ID":"db53e274-36cd-48a1-b907-ba4ed4e5d7e7","Type":"ContainerStarted","Data":"22350a5525fc09baa7405238497ec6b59731b91188079f0f90fd57ce970f2172"} Dec 10 15:39:15 crc kubenswrapper[4669]: I1210 15:39:15.014857 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c87fd57c8-hv4dt" event={"ID":"db53e274-36cd-48a1-b907-ba4ed4e5d7e7","Type":"ContainerStarted","Data":"2a5ba981ef162a714ed62fc258b986c3aac93fd120f8229f8014171433e2a9c0"} Dec 10 15:39:15 crc kubenswrapper[4669]: I1210 15:39:15.014899 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c87fd57c8-hv4dt" Dec 10 15:39:15 crc kubenswrapper[4669]: I1210 15:39:15.015204 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5c87fd57c8-hv4dt" Dec 10 15:39:15 crc kubenswrapper[4669]: I1210 15:39:15.033672 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-6556f5d7cd-dgcb9" podStartSLOduration=3.03365614 podStartE2EDuration="3.03365614s" podCreationTimestamp="2025-12-10 15:39:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:15.029597972 +0000 UTC m=+1128.946544589" watchObservedRunningTime="2025-12-10 15:39:15.03365614 +0000 UTC m=+1128.950602767" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.217325 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5c87fd57c8-hv4dt" podStartSLOduration=4.217302894 podStartE2EDuration="4.217302894s" podCreationTimestamp="2025-12-10 15:39:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:15.056395755 +0000 UTC m=+1128.973342382" watchObservedRunningTime="2025-12-10 15:39:16.217302894 +0000 UTC m=+1130.134249521" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.262487 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-547d56d5c6-827zl"] Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.276012 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.278634 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.282107 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.289207 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-547d56d5c6-827zl"] Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.455775 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r58pd\" (UniqueName: \"kubernetes.io/projected/e492df2a-6b67-4562-96cd-3c7495e7f9b3-kube-api-access-r58pd\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.455842 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-config-data-custom\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.455917 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-public-tls-certs\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.455941 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-config-data\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.455963 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-combined-ca-bundle\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.455987 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-internal-tls-certs\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.456011 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e492df2a-6b67-4562-96cd-3c7495e7f9b3-logs\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.557399 4669 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-r58pd\" (UniqueName: \"kubernetes.io/projected/e492df2a-6b67-4562-96cd-3c7495e7f9b3-kube-api-access-r58pd\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.557505 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-config-data-custom\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.557598 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-public-tls-certs\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.557619 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-config-data\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.557644 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-combined-ca-bundle\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.557689 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-internal-tls-certs\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.557716 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e492df2a-6b67-4562-96cd-3c7495e7f9b3-logs\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.558165 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e492df2a-6b67-4562-96cd-3c7495e7f9b3-logs\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.564637 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-internal-tls-certs\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.565575 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-config-data-custom\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.567019 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-public-tls-certs\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.576157 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r58pd\" (UniqueName: \"kubernetes.io/projected/e492df2a-6b67-4562-96cd-3c7495e7f9b3-kube-api-access-r58pd\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.576942 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-combined-ca-bundle\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.582166 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e492df2a-6b67-4562-96cd-3c7495e7f9b3-config-data\") pod \"barbican-api-547d56d5c6-827zl\" (UID: \"e492df2a-6b67-4562-96cd-3c7495e7f9b3\") " pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:16 crc kubenswrapper[4669]: I1210 15:39:16.605419 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:17 crc kubenswrapper[4669]: I1210 15:39:17.036413 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-ff92t" event={"ID":"1256f642-992a-4d14-b552-5b471de3a211","Type":"ContainerStarted","Data":"03da3e73be53cf4fcb605a858943b1c99057f8fffdf64de833e1ec603f3d5207"} Dec 10 15:39:17 crc kubenswrapper[4669]: I1210 15:39:17.230725 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-547d56d5c6-827zl"] Dec 10 15:39:18 crc kubenswrapper[4669]: I1210 15:39:18.045950 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-547d56d5c6-827zl" event={"ID":"e492df2a-6b67-4562-96cd-3c7495e7f9b3","Type":"ContainerStarted","Data":"6f3787024efbf26976ad79ccbfa37401e586e9f6ee2928c50a68b1246dd503e7"} Dec 10 15:39:18 crc kubenswrapper[4669]: I1210 15:39:18.046247 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-547d56d5c6-827zl" event={"ID":"e492df2a-6b67-4562-96cd-3c7495e7f9b3","Type":"ContainerStarted","Data":"09484c8ac6b4cfeec064a829a24cf2315e60079d618699f335bebc8ec5e79726"} Dec 10 15:39:18 crc kubenswrapper[4669]: I1210 15:39:18.046266 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-869f779d85-ff92t" Dec 10 15:39:18 crc kubenswrapper[4669]: I1210 15:39:18.075424 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-869f779d85-ff92t" podStartSLOduration=6.075406185 podStartE2EDuration="6.075406185s" podCreationTimestamp="2025-12-10 15:39:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:18.0682268 +0000 UTC m=+1131.985173427" watchObservedRunningTime="2025-12-10 15:39:18.075406185 +0000 UTC m=+1131.992352812" Dec 10 15:39:21 crc kubenswrapper[4669]: I1210 15:39:21.279433 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-5c87fd57c8-hv4dt" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerName="barbican-api" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 10 15:39:22 crc kubenswrapper[4669]: I1210 15:39:22.691403 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-869f779d85-ff92t" Dec 10 15:39:22 crc kubenswrapper[4669]: I1210 15:39:22.753559 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-xz9tf"] Dec 10 15:39:22 crc kubenswrapper[4669]: I1210 15:39:22.753830 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" podUID="5328170f-a268-4c18-9012-2b99ec73bbf1" containerName="dnsmasq-dns" containerID="cri-o://84ec240b32c770e55ae43e4010374b2f77bcbfe82efad9e5d24fe96404ae9d27" gracePeriod=10 Dec 10 15:39:23 crc kubenswrapper[4669]: I1210 15:39:23.106645 4669 generic.go:334] "Generic (PLEG): container finished" podID="5328170f-a268-4c18-9012-2b99ec73bbf1" containerID="84ec240b32c770e55ae43e4010374b2f77bcbfe82efad9e5d24fe96404ae9d27" exitCode=0 Dec 10 15:39:23 crc kubenswrapper[4669]: I1210 15:39:23.107052 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" event={"ID":"5328170f-a268-4c18-9012-2b99ec73bbf1","Type":"ContainerDied","Data":"84ec240b32c770e55ae43e4010374b2f77bcbfe82efad9e5d24fe96404ae9d27"} Dec 10 15:39:23 crc kubenswrapper[4669]: 
I1210 15:39:23.112983 4669 generic.go:334] "Generic (PLEG): container finished" podID="20e32d2e-0738-45f0-bb91-b8e48694928b" containerID="9d4ec21be6882f695adae6225c4e2409e150ae6980432998e5b3afaba137946f" exitCode=0 Dec 10 15:39:23 crc kubenswrapper[4669]: I1210 15:39:23.113023 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8cqw2" event={"ID":"20e32d2e-0738-45f0-bb91-b8e48694928b","Type":"ContainerDied","Data":"9d4ec21be6882f695adae6225c4e2409e150ae6980432998e5b3afaba137946f"} Dec 10 15:39:24 crc kubenswrapper[4669]: I1210 15:39:24.881835 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c87fd57c8-hv4dt" Dec 10 15:39:24 crc kubenswrapper[4669]: I1210 15:39:24.928845 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5c87fd57c8-hv4dt" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.151193 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-8cqw2" event={"ID":"20e32d2e-0738-45f0-bb91-b8e48694928b","Type":"ContainerDied","Data":"5bdd49e8ec4f732bd868c88d4dfdd87772c9754a55674884cda7f420087dde56"} Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.151251 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bdd49e8ec4f732bd868c88d4dfdd87772c9754a55674884cda7f420087dde56" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.153661 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" event={"ID":"5328170f-a268-4c18-9012-2b99ec73bbf1","Type":"ContainerDied","Data":"ecf5630772e691a59eb49f74410e74ab00a570059806f1407e49a72bf544fbc5"} Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.153712 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ecf5630772e691a59eb49f74410e74ab00a570059806f1407e49a72bf544fbc5" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.180416 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.244844 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.375955 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-config\") pod \"5328170f-a268-4c18-9012-2b99ec73bbf1\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.376155 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-sb\") pod \"5328170f-a268-4c18-9012-2b99ec73bbf1\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.376288 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xm44b\" (UniqueName: \"kubernetes.io/projected/20e32d2e-0738-45f0-bb91-b8e48694928b-kube-api-access-xm44b\") pod \"20e32d2e-0738-45f0-bb91-b8e48694928b\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.377289 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-config-data\") pod \"20e32d2e-0738-45f0-bb91-b8e48694928b\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.377449 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20e32d2e-0738-45f0-bb91-b8e48694928b-etc-machine-id\") pod \"20e32d2e-0738-45f0-bb91-b8e48694928b\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.377549 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-scripts\") pod \"20e32d2e-0738-45f0-bb91-b8e48694928b\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.377702 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-nb\") pod \"5328170f-a268-4c18-9012-2b99ec73bbf1\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.377843 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-combined-ca-bundle\") pod \"20e32d2e-0738-45f0-bb91-b8e48694928b\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.377916 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-db-sync-config-data\") pod \"20e32d2e-0738-45f0-bb91-b8e48694928b\" (UID: \"20e32d2e-0738-45f0-bb91-b8e48694928b\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.378029 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m254f\" (UniqueName: \"kubernetes.io/projected/5328170f-a268-4c18-9012-2b99ec73bbf1-kube-api-access-m254f\") pod \"5328170f-a268-4c18-9012-2b99ec73bbf1\" 
(UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.378104 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-dns-svc\") pod \"5328170f-a268-4c18-9012-2b99ec73bbf1\" (UID: \"5328170f-a268-4c18-9012-2b99ec73bbf1\") " Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.380803 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20e32d2e-0738-45f0-bb91-b8e48694928b-kube-api-access-xm44b" (OuterVolumeSpecName: "kube-api-access-xm44b") pod "20e32d2e-0738-45f0-bb91-b8e48694928b" (UID: "20e32d2e-0738-45f0-bb91-b8e48694928b"). InnerVolumeSpecName "kube-api-access-xm44b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.380861 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/20e32d2e-0738-45f0-bb91-b8e48694928b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "20e32d2e-0738-45f0-bb91-b8e48694928b" (UID: "20e32d2e-0738-45f0-bb91-b8e48694928b"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.390424 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5328170f-a268-4c18-9012-2b99ec73bbf1-kube-api-access-m254f" (OuterVolumeSpecName: "kube-api-access-m254f") pod "5328170f-a268-4c18-9012-2b99ec73bbf1" (UID: "5328170f-a268-4c18-9012-2b99ec73bbf1"). InnerVolumeSpecName "kube-api-access-m254f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.416275 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-scripts" (OuterVolumeSpecName: "scripts") pod "20e32d2e-0738-45f0-bb91-b8e48694928b" (UID: "20e32d2e-0738-45f0-bb91-b8e48694928b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.422357 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "20e32d2e-0738-45f0-bb91-b8e48694928b" (UID: "20e32d2e-0738-45f0-bb91-b8e48694928b"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.480736 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xm44b\" (UniqueName: \"kubernetes.io/projected/20e32d2e-0738-45f0-bb91-b8e48694928b-kube-api-access-xm44b\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.480766 4669 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/20e32d2e-0738-45f0-bb91-b8e48694928b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.480777 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.480785 4669 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.480795 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m254f\" (UniqueName: \"kubernetes.io/projected/5328170f-a268-4c18-9012-2b99ec73bbf1-kube-api-access-m254f\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.638978 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "20e32d2e-0738-45f0-bb91-b8e48694928b" (UID: "20e32d2e-0738-45f0-bb91-b8e48694928b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.661300 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-config" (OuterVolumeSpecName: "config") pod "5328170f-a268-4c18-9012-2b99ec73bbf1" (UID: "5328170f-a268-4c18-9012-2b99ec73bbf1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.677879 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5328170f-a268-4c18-9012-2b99ec73bbf1" (UID: "5328170f-a268-4c18-9012-2b99ec73bbf1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.684939 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.684965 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.684975 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.691869 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5328170f-a268-4c18-9012-2b99ec73bbf1" (UID: "5328170f-a268-4c18-9012-2b99ec73bbf1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.693033 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5328170f-a268-4c18-9012-2b99ec73bbf1" (UID: "5328170f-a268-4c18-9012-2b99ec73bbf1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.708479 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-config-data" (OuterVolumeSpecName: "config-data") pod "20e32d2e-0738-45f0-bb91-b8e48694928b" (UID: "20e32d2e-0738-45f0-bb91-b8e48694928b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.786809 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.786846 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/20e32d2e-0738-45f0-bb91-b8e48694928b-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:26 crc kubenswrapper[4669]: I1210 15:39:26.786860 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5328170f-a268-4c18-9012-2b99ec73bbf1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.168843 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5" event={"ID":"8a62c0c4-e96b-486b-8660-5a797598341b","Type":"ContainerStarted","Data":"76cc74cbf6463c9602b4f7b7952f198f4452f02ccc334481ae23e3193774c54f"} Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.168899 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5" event={"ID":"8a62c0c4-e96b-486b-8660-5a797598341b","Type":"ContainerStarted","Data":"a74c057f547a9a35236dc85edd9afb135514ae45dee3e0e23a4859d59460354a"} Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.173901 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-dc69454c7-zsnjz" event={"ID":"87675b91-d647-453f-bb63-2d10ddb27991","Type":"ContainerStarted","Data":"795a73271ec1130221892d46deffa35634c10278dbc1f1b285025773817c26b5"} Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.173959 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-dc69454c7-zsnjz" event={"ID":"87675b91-d647-453f-bb63-2d10ddb27991","Type":"ContainerStarted","Data":"bfb6521cfbbce0fa75c02e5a3d31abbe037cc751189945be80ff95efaa5f8ce9"} Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.176538 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerStarted","Data":"057bf2460e35c72968457185a63bd7d3661ee4f10113628c9f96dd44c4ddb367"} Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.176725 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="ceilometer-central-agent" containerID="cri-o://02d94dba0bb873fa665594bca91a6fb63cff5588f54aa9eb9a55dce1eefaf15f" gracePeriod=30 Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.176826 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.176884 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="proxy-httpd" containerID="cri-o://057bf2460e35c72968457185a63bd7d3661ee4f10113628c9f96dd44c4ddb367" gracePeriod=30 Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.176945 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="sg-core" 
containerID="cri-o://f5ceb0ad15facabb73d1b75502e5e56dd776dac5e4d377c388824af406d313a8" gracePeriod=30 Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.177003 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="ceilometer-notification-agent" containerID="cri-o://ee9e17c9f85d4e6a1489ac6c3f0c07944b27c4b5069df6f27d3f4355083ec4da" gracePeriod=30 Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.183448 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-547d56d5c6-827zl" event={"ID":"e492df2a-6b67-4562-96cd-3c7495e7f9b3","Type":"ContainerStarted","Data":"c42a74e336a10573bf348b110efc8f43b4e8553718969fb85a8a0de9a8b148e0"} Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.183511 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-8cqw2" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.183787 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f66db59b9-xz9tf" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.186196 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.186591 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.193565 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-d6f8b949d-tbds5" podStartSLOduration=2.869100117 podStartE2EDuration="15.193546915s" podCreationTimestamp="2025-12-10 15:39:12 +0000 UTC" firstStartedPulling="2025-12-10 15:39:13.739348058 +0000 UTC m=+1127.656294685" lastFinishedPulling="2025-12-10 15:39:26.063794856 +0000 UTC m=+1139.980741483" observedRunningTime="2025-12-10 15:39:27.188497473 +0000 UTC m=+1141.105444100" watchObservedRunningTime="2025-12-10 15:39:27.193546915 +0000 UTC m=+1141.110493542" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.228389 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-547d56d5c6-827zl" podStartSLOduration=11.228371735 podStartE2EDuration="11.228371735s" podCreationTimestamp="2025-12-10 15:39:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:27.226062978 +0000 UTC m=+1141.143009605" watchObservedRunningTime="2025-12-10 15:39:27.228371735 +0000 UTC m=+1141.145318362" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.252750 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-dc69454c7-zsnjz" podStartSLOduration=5.285555358 podStartE2EDuration="16.252729169s" podCreationTimestamp="2025-12-10 15:39:11 +0000 UTC" firstStartedPulling="2025-12-10 15:39:13.491397981 +0000 UTC m=+1127.408344598" lastFinishedPulling="2025-12-10 15:39:24.458571782 +0000 UTC m=+1138.375518409" observedRunningTime="2025-12-10 15:39:27.244322474 +0000 UTC m=+1141.161269091" watchObservedRunningTime="2025-12-10 15:39:27.252729169 +0000 UTC m=+1141.169675796" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.274902 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.824472767 
podStartE2EDuration="52.274884849s" podCreationTimestamp="2025-12-10 15:38:35 +0000 UTC" firstStartedPulling="2025-12-10 15:38:38.825051966 +0000 UTC m=+1092.741998593" lastFinishedPulling="2025-12-10 15:39:26.275464048 +0000 UTC m=+1140.192410675" observedRunningTime="2025-12-10 15:39:27.266473004 +0000 UTC m=+1141.183419631" watchObservedRunningTime="2025-12-10 15:39:27.274884849 +0000 UTC m=+1141.191831476" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.327967 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-xz9tf"] Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.346488 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f66db59b9-xz9tf"] Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.545320 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 15:39:27 crc kubenswrapper[4669]: E1210 15:39:27.545663 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5328170f-a268-4c18-9012-2b99ec73bbf1" containerName="dnsmasq-dns" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.545680 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="5328170f-a268-4c18-9012-2b99ec73bbf1" containerName="dnsmasq-dns" Dec 10 15:39:27 crc kubenswrapper[4669]: E1210 15:39:27.545700 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5328170f-a268-4c18-9012-2b99ec73bbf1" containerName="init" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.545706 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="5328170f-a268-4c18-9012-2b99ec73bbf1" containerName="init" Dec 10 15:39:27 crc kubenswrapper[4669]: E1210 15:39:27.545718 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20e32d2e-0738-45f0-bb91-b8e48694928b" containerName="cinder-db-sync" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.545726 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="20e32d2e-0738-45f0-bb91-b8e48694928b" containerName="cinder-db-sync" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.545883 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="5328170f-a268-4c18-9012-2b99ec73bbf1" containerName="dnsmasq-dns" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.545897 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="20e32d2e-0738-45f0-bb91-b8e48694928b" containerName="cinder-db-sync" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.556912 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.560038 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.560250 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-f45n4" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.560351 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.560435 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.575207 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.704028 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-5nkn8"] Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.704142 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.704257 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-scripts\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.704281 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.704305 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgrpm\" (UniqueName: \"kubernetes.io/projected/ca8e0593-5211-41fe-b79d-68e63a88f9da-kube-api-access-lgrpm\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.704355 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca8e0593-5211-41fe-b79d-68e63a88f9da-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.704379 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.705429 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.726898 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-5nkn8"] Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806163 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806225 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-dns-svc\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806248 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-scripts\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806269 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806295 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vkfc\" (UniqueName: \"kubernetes.io/projected/a7688848-28a2-4b53-bc4e-4867d1ef570e-kube-api-access-5vkfc\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806315 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgrpm\" (UniqueName: \"kubernetes.io/projected/ca8e0593-5211-41fe-b79d-68e63a88f9da-kube-api-access-lgrpm\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806375 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca8e0593-5211-41fe-b79d-68e63a88f9da-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806400 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-config\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806422 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806475 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806497 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.806756 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca8e0593-5211-41fe-b79d-68e63a88f9da-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.839833 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.841369 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.844899 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-scripts\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.848950 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgrpm\" (UniqueName: \"kubernetes.io/projected/ca8e0593-5211-41fe-b79d-68e63a88f9da-kube-api-access-lgrpm\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.850499 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.898870 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.909231 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.909313 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.909336 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-dns-svc\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.909368 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vkfc\" (UniqueName: \"kubernetes.io/projected/a7688848-28a2-4b53-bc4e-4867d1ef570e-kube-api-access-5vkfc\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.909422 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-config\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.910210 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-dns-svc\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.910591 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-config\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.910763 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-nb\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.911231 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-sb\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.913316 4669 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.914819 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.920497 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.960574 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 10 15:39:27 crc kubenswrapper[4669]: I1210 15:39:27.992399 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vkfc\" (UniqueName: \"kubernetes.io/projected/a7688848-28a2-4b53-bc4e-4867d1ef570e-kube-api-access-5vkfc\") pod \"dnsmasq-dns-58db5546cc-5nkn8\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.011048 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61051780-e82f-4a1c-8115-a8318fa58b2a-logs\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.011137 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-scripts\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.011198 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data-custom\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.011275 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/61051780-e82f-4a1c-8115-a8318fa58b2a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.011349 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vflvw\" (UniqueName: \"kubernetes.io/projected/61051780-e82f-4a1c-8115-a8318fa58b2a-kube-api-access-vflvw\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.011417 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.011436 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " 
pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.044089 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.122490 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.122535 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.124768 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61051780-e82f-4a1c-8115-a8318fa58b2a-logs\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.124918 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-scripts\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.124967 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data-custom\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.125035 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/61051780-e82f-4a1c-8115-a8318fa58b2a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.125120 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vflvw\" (UniqueName: \"kubernetes.io/projected/61051780-e82f-4a1c-8115-a8318fa58b2a-kube-api-access-vflvw\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.142352 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.142453 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/61051780-e82f-4a1c-8115-a8318fa58b2a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.143695 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/61051780-e82f-4a1c-8115-a8318fa58b2a-logs\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.144205 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data-custom\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.153982 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.163572 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-scripts\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.195783 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vflvw\" (UniqueName: \"kubernetes.io/projected/61051780-e82f-4a1c-8115-a8318fa58b2a-kube-api-access-vflvw\") pod \"cinder-api-0\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.245109 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.245517 4669 generic.go:334] "Generic (PLEG): container finished" podID="e57dc4fd-35d5-4151-b620-7903e12be753" containerID="057bf2460e35c72968457185a63bd7d3661ee4f10113628c9f96dd44c4ddb367" exitCode=0 Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.245545 4669 generic.go:334] "Generic (PLEG): container finished" podID="e57dc4fd-35d5-4151-b620-7903e12be753" containerID="f5ceb0ad15facabb73d1b75502e5e56dd776dac5e4d377c388824af406d313a8" exitCode=2 Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.245681 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerDied","Data":"057bf2460e35c72968457185a63bd7d3661ee4f10113628c9f96dd44c4ddb367"} Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.245726 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerDied","Data":"f5ceb0ad15facabb73d1b75502e5e56dd776dac5e4d377c388824af406d313a8"} Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.425019 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5328170f-a268-4c18-9012-2b99ec73bbf1" path="/var/lib/kubelet/pods/5328170f-a268-4c18-9012-2b99ec73bbf1/volumes" Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.699985 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 15:39:28 crc kubenswrapper[4669]: I1210 15:39:28.900418 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-5nkn8"] Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.118882 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 10 
15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.278543 4669 generic.go:334] "Generic (PLEG): container finished" podID="e57dc4fd-35d5-4151-b620-7903e12be753" containerID="ee9e17c9f85d4e6a1489ac6c3f0c07944b27c4b5069df6f27d3f4355083ec4da" exitCode=0 Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.278571 4669 generic.go:334] "Generic (PLEG): container finished" podID="e57dc4fd-35d5-4151-b620-7903e12be753" containerID="02d94dba0bb873fa665594bca91a6fb63cff5588f54aa9eb9a55dce1eefaf15f" exitCode=0 Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.278607 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerDied","Data":"ee9e17c9f85d4e6a1489ac6c3f0c07944b27c4b5069df6f27d3f4355083ec4da"} Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.278633 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerDied","Data":"02d94dba0bb873fa665594bca91a6fb63cff5588f54aa9eb9a55dce1eefaf15f"} Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.278646 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e57dc4fd-35d5-4151-b620-7903e12be753","Type":"ContainerDied","Data":"11c764e6ea5b25c74eb6847e7250c0cd84d1471e3a8beab5c0bc9251c0762c3e"} Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.278655 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="11c764e6ea5b25c74eb6847e7250c0cd84d1471e3a8beab5c0bc9251c0762c3e" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.281926 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"61051780-e82f-4a1c-8115-a8318fa58b2a","Type":"ContainerStarted","Data":"7d788aec81efb5fca7d6b0baf0408a24ccfd10506267e16d1211a74c0e3e9d6b"} Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.287721 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" event={"ID":"a7688848-28a2-4b53-bc4e-4867d1ef570e","Type":"ContainerStarted","Data":"9a2a74f34ccfe1ff7442ffba39a48da7897a3264fed1301791fc3b90d4ff0736"} Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.294716 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ca8e0593-5211-41fe-b79d-68e63a88f9da","Type":"ContainerStarted","Data":"5e844cbccdfcdfd9196afd1b349ce7fb26fc86837f05b3b25ffad9bf945d1555"} Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.358434 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.488902 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-scripts\") pod \"e57dc4fd-35d5-4151-b620-7903e12be753\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.488989 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-run-httpd\") pod \"e57dc4fd-35d5-4151-b620-7903e12be753\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.489028 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-log-httpd\") pod \"e57dc4fd-35d5-4151-b620-7903e12be753\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.489106 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-combined-ca-bundle\") pod \"e57dc4fd-35d5-4151-b620-7903e12be753\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.489137 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-config-data\") pod \"e57dc4fd-35d5-4151-b620-7903e12be753\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.489173 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9r6s9\" (UniqueName: \"kubernetes.io/projected/e57dc4fd-35d5-4151-b620-7903e12be753-kube-api-access-9r6s9\") pod \"e57dc4fd-35d5-4151-b620-7903e12be753\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.489202 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-sg-core-conf-yaml\") pod \"e57dc4fd-35d5-4151-b620-7903e12be753\" (UID: \"e57dc4fd-35d5-4151-b620-7903e12be753\") " Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.489910 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e57dc4fd-35d5-4151-b620-7903e12be753" (UID: "e57dc4fd-35d5-4151-b620-7903e12be753"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.490616 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e57dc4fd-35d5-4151-b620-7903e12be753" (UID: "e57dc4fd-35d5-4151-b620-7903e12be753"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.504102 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-scripts" (OuterVolumeSpecName: "scripts") pod "e57dc4fd-35d5-4151-b620-7903e12be753" (UID: "e57dc4fd-35d5-4151-b620-7903e12be753"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.533301 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e57dc4fd-35d5-4151-b620-7903e12be753-kube-api-access-9r6s9" (OuterVolumeSpecName: "kube-api-access-9r6s9") pod "e57dc4fd-35d5-4151-b620-7903e12be753" (UID: "e57dc4fd-35d5-4151-b620-7903e12be753"). InnerVolumeSpecName "kube-api-access-9r6s9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.565768 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e57dc4fd-35d5-4151-b620-7903e12be753" (UID: "e57dc4fd-35d5-4151-b620-7903e12be753"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.591064 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.591093 4669 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.591102 4669 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e57dc4fd-35d5-4151-b620-7903e12be753-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.591111 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9r6s9\" (UniqueName: \"kubernetes.io/projected/e57dc4fd-35d5-4151-b620-7903e12be753-kube-api-access-9r6s9\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.591120 4669 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.621817 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e57dc4fd-35d5-4151-b620-7903e12be753" (UID: "e57dc4fd-35d5-4151-b620-7903e12be753"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.632387 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-config-data" (OuterVolumeSpecName: "config-data") pod "e57dc4fd-35d5-4151-b620-7903e12be753" (UID: "e57dc4fd-35d5-4151-b620-7903e12be753"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.692375 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:29 crc kubenswrapper[4669]: I1210 15:39:29.692476 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e57dc4fd-35d5-4151-b620-7903e12be753-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.320788 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"61051780-e82f-4a1c-8115-a8318fa58b2a","Type":"ContainerStarted","Data":"c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161"} Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.323269 4669 generic.go:334] "Generic (PLEG): container finished" podID="a7688848-28a2-4b53-bc4e-4867d1ef570e" containerID="c52155ee6a60d474f77068e3a9c4bb754e1376e10fb4ce2e64c8a548a5a5eddb" exitCode=0 Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.323350 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.323361 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" event={"ID":"a7688848-28a2-4b53-bc4e-4867d1ef570e","Type":"ContainerDied","Data":"c52155ee6a60d474f77068e3a9c4bb754e1376e10fb4ce2e64c8a548a5a5eddb"} Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.511158 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.524395 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.540840 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:39:30 crc kubenswrapper[4669]: E1210 15:39:30.541374 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="proxy-httpd" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.541431 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="proxy-httpd" Dec 10 15:39:30 crc kubenswrapper[4669]: E1210 15:39:30.541473 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="sg-core" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.541482 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="sg-core" Dec 10 15:39:30 crc kubenswrapper[4669]: E1210 15:39:30.541499 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="ceilometer-central-agent" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.541506 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="ceilometer-central-agent" Dec 10 15:39:30 crc kubenswrapper[4669]: E1210 15:39:30.541523 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="ceilometer-notification-agent" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.541530 4669 
state_mem.go:107] "Deleted CPUSet assignment" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="ceilometer-notification-agent" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.541745 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="ceilometer-central-agent" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.541767 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="proxy-httpd" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.541796 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="ceilometer-notification-agent" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.541811 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" containerName="sg-core" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.547510 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.551704 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.551883 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.557566 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.713486 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-scripts\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.714009 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.714076 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.714133 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-log-httpd\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.714322 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-run-httpd\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.714379 4669 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whjm4\" (UniqueName: \"kubernetes.io/projected/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-kube-api-access-whjm4\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.714402 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-config-data\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.816445 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.816497 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.816525 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-log-httpd\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.816591 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-run-httpd\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.816622 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whjm4\" (UniqueName: \"kubernetes.io/projected/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-kube-api-access-whjm4\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.816637 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-config-data\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.816668 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-scripts\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.817447 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-log-httpd\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.819246 4669 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-run-httpd\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.821187 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.822117 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-config-data\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.822661 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-scripts\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.822776 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.840885 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whjm4\" (UniqueName: \"kubernetes.io/projected/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-kube-api-access-whjm4\") pod \"ceilometer-0\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") " pod="openstack/ceilometer-0" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.848787 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:30 crc kubenswrapper[4669]: I1210 15:39:30.889643 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:39:31 crc kubenswrapper[4669]: I1210 15:39:31.057437 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 10 15:39:31 crc kubenswrapper[4669]: I1210 15:39:31.358195 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ca8e0593-5211-41fe-b79d-68e63a88f9da","Type":"ContainerStarted","Data":"560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134"} Dec 10 15:39:31 crc kubenswrapper[4669]: I1210 15:39:31.367202 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" event={"ID":"a7688848-28a2-4b53-bc4e-4867d1ef570e","Type":"ContainerStarted","Data":"5bd1c9e8f2f95af4e825f3ad173461c8b56a7f7c3daefb3ea81a420c10dff1bd"} Dec 10 15:39:31 crc kubenswrapper[4669]: I1210 15:39:31.369872 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:31 crc kubenswrapper[4669]: I1210 15:39:31.418242 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" podStartSLOduration=4.418202626 podStartE2EDuration="4.418202626s" podCreationTimestamp="2025-12-10 15:39:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:31.405806834 +0000 UTC m=+1145.322753461" watchObservedRunningTime="2025-12-10 15:39:31.418202626 +0000 UTC m=+1145.335149253" Dec 10 15:39:31 crc kubenswrapper[4669]: I1210 15:39:31.461818 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:31 crc kubenswrapper[4669]: I1210 15:39:31.571451 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.377184 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ca8e0593-5211-41fe-b79d-68e63a88f9da","Type":"ContainerStarted","Data":"468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8"} Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.379114 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerStarted","Data":"05004f7cf78829c958402d183d1fba3bfa9d3528d721b1c3a7f099c288b6c991"} Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.381254 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"61051780-e82f-4a1c-8115-a8318fa58b2a","Type":"ContainerStarted","Data":"11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52"} Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.381345 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerName="cinder-api-log" containerID="cri-o://c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161" gracePeriod=30 Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.381434 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerName="cinder-api" containerID="cri-o://11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52" gracePeriod=30 Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.381526 4669 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.409818 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.452703037 podStartE2EDuration="5.409802867s" podCreationTimestamp="2025-12-10 15:39:27 +0000 UTC" firstStartedPulling="2025-12-10 15:39:28.693145694 +0000 UTC m=+1142.610092321" lastFinishedPulling="2025-12-10 15:39:29.650245524 +0000 UTC m=+1143.567192151" observedRunningTime="2025-12-10 15:39:32.407367137 +0000 UTC m=+1146.324313764" watchObservedRunningTime="2025-12-10 15:39:32.409802867 +0000 UTC m=+1146.326749484" Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.410173 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e57dc4fd-35d5-4151-b620-7903e12be753" path="/var/lib/kubelet/pods/e57dc4fd-35d5-4151-b620-7903e12be753/volumes" Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.438627 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.438603409 podStartE2EDuration="5.438603409s" podCreationTimestamp="2025-12-10 15:39:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:32.430329467 +0000 UTC m=+1146.347276094" watchObservedRunningTime="2025-12-10 15:39:32.438603409 +0000 UTC m=+1146.355550046" Dec 10 15:39:32 crc kubenswrapper[4669]: I1210 15:39:32.899656 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.023849 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.164425 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/61051780-e82f-4a1c-8115-a8318fa58b2a-etc-machine-id\") pod \"61051780-e82f-4a1c-8115-a8318fa58b2a\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.164491 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-combined-ca-bundle\") pod \"61051780-e82f-4a1c-8115-a8318fa58b2a\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.164522 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data\") pod \"61051780-e82f-4a1c-8115-a8318fa58b2a\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.164538 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/61051780-e82f-4a1c-8115-a8318fa58b2a-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "61051780-e82f-4a1c-8115-a8318fa58b2a" (UID: "61051780-e82f-4a1c-8115-a8318fa58b2a"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.164615 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data-custom\") pod \"61051780-e82f-4a1c-8115-a8318fa58b2a\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.164663 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vflvw\" (UniqueName: \"kubernetes.io/projected/61051780-e82f-4a1c-8115-a8318fa58b2a-kube-api-access-vflvw\") pod \"61051780-e82f-4a1c-8115-a8318fa58b2a\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.164691 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61051780-e82f-4a1c-8115-a8318fa58b2a-logs\") pod \"61051780-e82f-4a1c-8115-a8318fa58b2a\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.164747 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-scripts\") pod \"61051780-e82f-4a1c-8115-a8318fa58b2a\" (UID: \"61051780-e82f-4a1c-8115-a8318fa58b2a\") " Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.165060 4669 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/61051780-e82f-4a1c-8115-a8318fa58b2a-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.166154 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61051780-e82f-4a1c-8115-a8318fa58b2a-logs" (OuterVolumeSpecName: "logs") pod "61051780-e82f-4a1c-8115-a8318fa58b2a" (UID: "61051780-e82f-4a1c-8115-a8318fa58b2a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.175364 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-scripts" (OuterVolumeSpecName: "scripts") pod "61051780-e82f-4a1c-8115-a8318fa58b2a" (UID: "61051780-e82f-4a1c-8115-a8318fa58b2a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.179375 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61051780-e82f-4a1c-8115-a8318fa58b2a-kube-api-access-vflvw" (OuterVolumeSpecName: "kube-api-access-vflvw") pod "61051780-e82f-4a1c-8115-a8318fa58b2a" (UID: "61051780-e82f-4a1c-8115-a8318fa58b2a"). InnerVolumeSpecName "kube-api-access-vflvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.192377 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "61051780-e82f-4a1c-8115-a8318fa58b2a" (UID: "61051780-e82f-4a1c-8115-a8318fa58b2a"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.232109 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data" (OuterVolumeSpecName: "config-data") pod "61051780-e82f-4a1c-8115-a8318fa58b2a" (UID: "61051780-e82f-4a1c-8115-a8318fa58b2a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.256718 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "61051780-e82f-4a1c-8115-a8318fa58b2a" (UID: "61051780-e82f-4a1c-8115-a8318fa58b2a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.268180 4669 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.268235 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vflvw\" (UniqueName: \"kubernetes.io/projected/61051780-e82f-4a1c-8115-a8318fa58b2a-kube-api-access-vflvw\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.268249 4669 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61051780-e82f-4a1c-8115-a8318fa58b2a-logs\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.268259 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.268267 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.268276 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/61051780-e82f-4a1c-8115-a8318fa58b2a-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.391746 4669 generic.go:334] "Generic (PLEG): container finished" podID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerID="11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52" exitCode=0 Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.392055 4669 generic.go:334] "Generic (PLEG): container finished" podID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerID="c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161" exitCode=143 Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.391894 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"61051780-e82f-4a1c-8115-a8318fa58b2a","Type":"ContainerDied","Data":"11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52"} Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.392149 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"61051780-e82f-4a1c-8115-a8318fa58b2a","Type":"ContainerDied","Data":"c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161"} Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.392167 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"61051780-e82f-4a1c-8115-a8318fa58b2a","Type":"ContainerDied","Data":"7d788aec81efb5fca7d6b0baf0408a24ccfd10506267e16d1211a74c0e3e9d6b"} Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.392182 4669 scope.go:117] "RemoveContainer" containerID="11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.391989 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.413858 4669 scope.go:117] "RemoveContainer" containerID="c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.442765 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.459080 4669 scope.go:117] "RemoveContainer" containerID="11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.459184 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.465782 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 10 15:39:33 crc kubenswrapper[4669]: E1210 15:39:33.466121 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerName="cinder-api" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.466136 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerName="cinder-api" Dec 10 15:39:33 crc kubenswrapper[4669]: E1210 15:39:33.466167 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerName="cinder-api-log" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.466174 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerName="cinder-api-log" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.466483 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerName="cinder-api" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.466505 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="61051780-e82f-4a1c-8115-a8318fa58b2a" containerName="cinder-api-log" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.467380 4669 util.go:30] "No sandbox for pod can be found. 
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.472773 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.472979 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.473094 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Dec 10 15:39:33 crc kubenswrapper[4669]: E1210 15:39:33.473132 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52\": container with ID starting with 11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52 not found: ID does not exist" containerID="11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.473169 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52"} err="failed to get container status \"11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52\": rpc error: code = NotFound desc = could not find container \"11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52\": container with ID starting with 11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52 not found: ID does not exist"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.473194 4669 scope.go:117] "RemoveContainer" containerID="c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161"
Dec 10 15:39:33 crc kubenswrapper[4669]: E1210 15:39:33.482797 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161\": container with ID starting with c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161 not found: ID does not exist" containerID="c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.482882 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161"} err="failed to get container status \"c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161\": rpc error: code = NotFound desc = could not find container \"c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161\": container with ID starting with c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161 not found: ID does not exist"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.482928 4669 scope.go:117] "RemoveContainer" containerID="11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.488394 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.491936 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52"} err="failed to get container status \"11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52\": rpc error: code = NotFound desc = could not find container \"11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52\": container with ID starting with 11a00e4b90fb17935f4cbf343b7a57db91e33c1406a9184ac42ce4fd5a032a52 not found: ID does not exist"
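The E-level "ContainerStatus from runtime service failed ... NotFound" lines and the paired "DeleteContainer returned error" entries above are expected noise rather than real failures: the kubelet retries removal of container IDs that CRI-O has already pruned, and NotFound simply confirms the desired end state; the I-level follow-up lines show the kubelet treating it that way. A hedged Go sketch of that pattern using the standard grpc-go status API (the isGone helper is illustrative, not kubelet code):

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// isGone treats NotFound from the runtime as success: when the goal is
// removal, "no such container" already is the desired end state.
func isGone(err error) bool {
	return err == nil || status.Code(err) == codes.NotFound
}

func main() {
	// Simulated runtime answer shaped like the log lines above.
	err := status.Error(codes.NotFound, `could not find container "11a00e4b90fb...": ID does not exist`)
	fmt.Println("already removed:", isGone(err)) // prints: already removed: true
}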
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.491981 4669 scope.go:117] "RemoveContainer" containerID="c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.517333 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161"} err="failed to get container status \"c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161\": rpc error: code = NotFound desc = could not find container \"c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161\": container with ID starting with c027bb56676c4c8883b23753f4a6b55e2d190b12b32212ae2cc72fbee999d161 not found: ID does not exist"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.588470 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-public-tls-certs\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.588528 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-scripts\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.588568 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmgbp\" (UniqueName: \"kubernetes.io/projected/345ad3da-25c1-4df8-8787-41d753b480ce-kube-api-access-wmgbp\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.588592 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-config-data-custom\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.588653 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.588689 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0"
Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.588736 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/345ad3da-25c1-4df8-8787-41d753b480ce-etc-machine-id\") pod
\"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.588757 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/345ad3da-25c1-4df8-8787-41d753b480ce-logs\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.588922 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-config-data\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.692332 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-config-data\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.692378 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-public-tls-certs\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.692420 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-scripts\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.692448 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmgbp\" (UniqueName: \"kubernetes.io/projected/345ad3da-25c1-4df8-8787-41d753b480ce-kube-api-access-wmgbp\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.692464 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-config-data-custom\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.692519 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.692556 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.692579 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/345ad3da-25c1-4df8-8787-41d753b480ce-etc-machine-id\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.692599 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/345ad3da-25c1-4df8-8787-41d753b480ce-logs\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.693182 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/345ad3da-25c1-4df8-8787-41d753b480ce-logs\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.695149 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/345ad3da-25c1-4df8-8787-41d753b480ce-etc-machine-id\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.698351 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-public-tls-certs\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.699989 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.701362 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-scripts\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.708786 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-config-data\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.708821 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-config-data-custom\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.710888 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/345ad3da-25c1-4df8-8787-41d753b480ce-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.714127 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmgbp\" (UniqueName: \"kubernetes.io/projected/345ad3da-25c1-4df8-8787-41d753b480ce-kube-api-access-wmgbp\") pod 
\"cinder-api-0\" (UID: \"345ad3da-25c1-4df8-8787-41d753b480ce\") " pod="openstack/cinder-api-0" Dec 10 15:39:33 crc kubenswrapper[4669]: I1210 15:39:33.844036 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 10 15:39:34 crc kubenswrapper[4669]: I1210 15:39:34.257676 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-547d56d5c6-827zl" Dec 10 15:39:34 crc kubenswrapper[4669]: I1210 15:39:34.356870 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c87fd57c8-hv4dt"] Dec 10 15:39:34 crc kubenswrapper[4669]: I1210 15:39:34.357160 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c87fd57c8-hv4dt" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerName="barbican-api-log" containerID="cri-o://22350a5525fc09baa7405238497ec6b59731b91188079f0f90fd57ce970f2172" gracePeriod=30 Dec 10 15:39:34 crc kubenswrapper[4669]: I1210 15:39:34.357532 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5c87fd57c8-hv4dt" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerName="barbican-api" containerID="cri-o://153610108aceb7cf5653d3945d1e1b6277dce157b5b587f68baf54874374966e" gracePeriod=30 Dec 10 15:39:34 crc kubenswrapper[4669]: I1210 15:39:34.385445 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 10 15:39:34 crc kubenswrapper[4669]: I1210 15:39:34.461581 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61051780-e82f-4a1c-8115-a8318fa58b2a" path="/var/lib/kubelet/pods/61051780-e82f-4a1c-8115-a8318fa58b2a/volumes" Dec 10 15:39:34 crc kubenswrapper[4669]: I1210 15:39:34.465548 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerStarted","Data":"a15461ba45d52eba788ae06c91731dc7cb3d7d680b82c39bc9479e48e6c3360d"} Dec 10 15:39:34 crc kubenswrapper[4669]: I1210 15:39:34.492698 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"345ad3da-25c1-4df8-8787-41d753b480ce","Type":"ContainerStarted","Data":"d73dff8a6113b0b82de588137eb18ec5bc2b649d31f606d48322bfaca138fec6"} Dec 10 15:39:35 crc kubenswrapper[4669]: I1210 15:39:35.142594 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-6d8556c6c7-9clqn" Dec 10 15:39:35 crc kubenswrapper[4669]: I1210 15:39:35.240286 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7bdbfc58b4-xzzxl"] Dec 10 15:39:35 crc kubenswrapper[4669]: I1210 15:39:35.240815 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7bdbfc58b4-xzzxl" podUID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerName="neutron-api" containerID="cri-o://be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259" gracePeriod=30 Dec 10 15:39:35 crc kubenswrapper[4669]: I1210 15:39:35.241255 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7bdbfc58b4-xzzxl" podUID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerName="neutron-httpd" containerID="cri-o://e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e" gracePeriod=30 Dec 10 15:39:35 crc kubenswrapper[4669]: I1210 15:39:35.522055 4669 generic.go:334] "Generic (PLEG): container finished" podID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" 
containerID="22350a5525fc09baa7405238497ec6b59731b91188079f0f90fd57ce970f2172" exitCode=143 Dec 10 15:39:35 crc kubenswrapper[4669]: I1210 15:39:35.522342 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c87fd57c8-hv4dt" event={"ID":"db53e274-36cd-48a1-b907-ba4ed4e5d7e7","Type":"ContainerDied","Data":"22350a5525fc09baa7405238497ec6b59731b91188079f0f90fd57ce970f2172"} Dec 10 15:39:35 crc kubenswrapper[4669]: I1210 15:39:35.562553 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"345ad3da-25c1-4df8-8787-41d753b480ce","Type":"ContainerStarted","Data":"ea6059abd9a9c11c131ab41382d7116f1762a8b8c6ae5589178b30980c54f740"} Dec 10 15:39:35 crc kubenswrapper[4669]: I1210 15:39:35.570848 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerStarted","Data":"093261ba1eddba73ae8d57da1762a81806d0b787c05e18dd28e9aa76e719ae86"} Dec 10 15:39:35 crc kubenswrapper[4669]: I1210 15:39:35.570887 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerStarted","Data":"3b7258bfbb07d947f4bf3df182f6c5ef90aabba4f4b922f5972f8af10fe969e7"} Dec 10 15:39:36 crc kubenswrapper[4669]: I1210 15:39:36.583597 4669 generic.go:334] "Generic (PLEG): container finished" podID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerID="e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e" exitCode=0 Dec 10 15:39:36 crc kubenswrapper[4669]: I1210 15:39:36.583722 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7bdbfc58b4-xzzxl" event={"ID":"986eb035-7a6f-4395-9baa-fd984f4bb232","Type":"ContainerDied","Data":"e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e"} Dec 10 15:39:36 crc kubenswrapper[4669]: I1210 15:39:36.589739 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"345ad3da-25c1-4df8-8787-41d753b480ce","Type":"ContainerStarted","Data":"44b9a4ef9e0a09f1cc9c691030cd10c20f3a604d58e6481191a21d6c043e2171"} Dec 10 15:39:36 crc kubenswrapper[4669]: I1210 15:39:36.590785 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 10 15:39:36 crc kubenswrapper[4669]: I1210 15:39:36.626864 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.626845872 podStartE2EDuration="3.626845872s" podCreationTimestamp="2025-12-10 15:39:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:36.620278561 +0000 UTC m=+1150.537225188" watchObservedRunningTime="2025-12-10 15:39:36.626845872 +0000 UTC m=+1150.543792499" Dec 10 15:39:37 crc kubenswrapper[4669]: I1210 15:39:37.603049 4669 generic.go:334] "Generic (PLEG): container finished" podID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerID="153610108aceb7cf5653d3945d1e1b6277dce157b5b587f68baf54874374966e" exitCode=0 Dec 10 15:39:37 crc kubenswrapper[4669]: I1210 15:39:37.603130 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c87fd57c8-hv4dt" event={"ID":"db53e274-36cd-48a1-b907-ba4ed4e5d7e7","Type":"ContainerDied","Data":"153610108aceb7cf5653d3945d1e1b6277dce157b5b587f68baf54874374966e"} Dec 10 15:39:37 crc kubenswrapper[4669]: I1210 15:39:37.613068 4669 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ceilometer-0" event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerStarted","Data":"76d5969153bd21b9e87cb994ca9f91347dba1b9dd4dff79dba16d4c810553344"} Dec 10 15:39:37 crc kubenswrapper[4669]: I1210 15:39:37.640729 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.591112658 podStartE2EDuration="7.640707635s" podCreationTimestamp="2025-12-10 15:39:30 +0000 UTC" firstStartedPulling="2025-12-10 15:39:31.617951817 +0000 UTC m=+1145.534898444" lastFinishedPulling="2025-12-10 15:39:36.667546794 +0000 UTC m=+1150.584493421" observedRunningTime="2025-12-10 15:39:37.632822003 +0000 UTC m=+1151.549768630" watchObservedRunningTime="2025-12-10 15:39:37.640707635 +0000 UTC m=+1151.557654262" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.016563 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c87fd57c8-hv4dt" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.046757 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.098703 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data-custom\") pod \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.098778 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-logs\") pod \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.098805 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data\") pod \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.098827 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nmld\" (UniqueName: \"kubernetes.io/projected/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-kube-api-access-8nmld\") pod \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.098871 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-combined-ca-bundle\") pod \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\" (UID: \"db53e274-36cd-48a1-b907-ba4ed4e5d7e7\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.099750 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-logs" (OuterVolumeSpecName: "logs") pod "db53e274-36cd-48a1-b907-ba4ed4e5d7e7" (UID: "db53e274-36cd-48a1-b907-ba4ed4e5d7e7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.100371 4669 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-logs\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.108277 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "db53e274-36cd-48a1-b907-ba4ed4e5d7e7" (UID: "db53e274-36cd-48a1-b907-ba4ed4e5d7e7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.137565 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-kube-api-access-8nmld" (OuterVolumeSpecName: "kube-api-access-8nmld") pod "db53e274-36cd-48a1-b907-ba4ed4e5d7e7" (UID: "db53e274-36cd-48a1-b907-ba4ed4e5d7e7"). InnerVolumeSpecName "kube-api-access-8nmld". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.155227 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-ff92t"] Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.155464 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-869f779d85-ff92t" podUID="1256f642-992a-4d14-b552-5b471de3a211" containerName="dnsmasq-dns" containerID="cri-o://03da3e73be53cf4fcb605a858943b1c99057f8fffdf64de833e1ec603f3d5207" gracePeriod=10 Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.158361 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db53e274-36cd-48a1-b907-ba4ed4e5d7e7" (UID: "db53e274-36cd-48a1-b907-ba4ed4e5d7e7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.202242 4669 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.202273 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nmld\" (UniqueName: \"kubernetes.io/projected/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-kube-api-access-8nmld\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.202283 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.218380 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data" (OuterVolumeSpecName: "config-data") pod "db53e274-36cd-48a1-b907-ba4ed4e5d7e7" (UID: "db53e274-36cd-48a1-b907-ba4ed4e5d7e7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.235169 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.304874 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db53e274-36cd-48a1-b907-ba4ed4e5d7e7-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.318288 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.405644 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.506674 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-ovndb-tls-certs\") pod \"986eb035-7a6f-4395-9baa-fd984f4bb232\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.506727 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-config\") pod \"986eb035-7a6f-4395-9baa-fd984f4bb232\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.506863 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-httpd-config\") pod \"986eb035-7a6f-4395-9baa-fd984f4bb232\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.506928 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-combined-ca-bundle\") pod \"986eb035-7a6f-4395-9baa-fd984f4bb232\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.506954 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pz44b\" (UniqueName: \"kubernetes.io/projected/986eb035-7a6f-4395-9baa-fd984f4bb232-kube-api-access-pz44b\") pod \"986eb035-7a6f-4395-9baa-fd984f4bb232\" (UID: \"986eb035-7a6f-4395-9baa-fd984f4bb232\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.513841 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "986eb035-7a6f-4395-9baa-fd984f4bb232" (UID: "986eb035-7a6f-4395-9baa-fd984f4bb232"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.519705 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/986eb035-7a6f-4395-9baa-fd984f4bb232-kube-api-access-pz44b" (OuterVolumeSpecName: "kube-api-access-pz44b") pod "986eb035-7a6f-4395-9baa-fd984f4bb232" (UID: "986eb035-7a6f-4395-9baa-fd984f4bb232"). InnerVolumeSpecName "kube-api-access-pz44b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.563296 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-config" (OuterVolumeSpecName: "config") pod "986eb035-7a6f-4395-9baa-fd984f4bb232" (UID: "986eb035-7a6f-4395-9baa-fd984f4bb232"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.564411 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "986eb035-7a6f-4395-9baa-fd984f4bb232" (UID: "986eb035-7a6f-4395-9baa-fd984f4bb232"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.590688 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "986eb035-7a6f-4395-9baa-fd984f4bb232" (UID: "986eb035-7a6f-4395-9baa-fd984f4bb232"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.610963 4669 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.619825 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.621907 4669 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.621921 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/986eb035-7a6f-4395-9baa-fd984f4bb232-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.621956 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pz44b\" (UniqueName: \"kubernetes.io/projected/986eb035-7a6f-4395-9baa-fd984f4bb232-kube-api-access-pz44b\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.644647 4669 generic.go:334] "Generic (PLEG): container finished" podID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerID="be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259" exitCode=0 Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.644708 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7bdbfc58b4-xzzxl" event={"ID":"986eb035-7a6f-4395-9baa-fd984f4bb232","Type":"ContainerDied","Data":"be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259"} Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.644733 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7bdbfc58b4-xzzxl" 
event={"ID":"986eb035-7a6f-4395-9baa-fd984f4bb232","Type":"ContainerDied","Data":"38cce0ddf1123a9abca7fc3cf3b0bd4effeb625b4f31c094527467463449cdb7"} Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.644749 4669 scope.go:117] "RemoveContainer" containerID="e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.644886 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7bdbfc58b4-xzzxl" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.652592 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5c87fd57c8-hv4dt" event={"ID":"db53e274-36cd-48a1-b907-ba4ed4e5d7e7","Type":"ContainerDied","Data":"2a5ba981ef162a714ed62fc258b986c3aac93fd120f8229f8014171433e2a9c0"} Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.652705 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5c87fd57c8-hv4dt" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.669742 4669 generic.go:334] "Generic (PLEG): container finished" podID="1256f642-992a-4d14-b552-5b471de3a211" containerID="03da3e73be53cf4fcb605a858943b1c99057f8fffdf64de833e1ec603f3d5207" exitCode=0 Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.669996 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-ff92t" event={"ID":"1256f642-992a-4d14-b552-5b471de3a211","Type":"ContainerDied","Data":"03da3e73be53cf4fcb605a858943b1c99057f8fffdf64de833e1ec603f3d5207"} Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.670329 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.670476 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerName="cinder-scheduler" containerID="cri-o://560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134" gracePeriod=30 Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.670540 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerName="probe" containerID="cri-o://468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8" gracePeriod=30 Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.755400 4669 scope.go:117] "RemoveContainer" containerID="be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.756301 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5c87fd57c8-hv4dt"] Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.764323 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5c87fd57c8-hv4dt"] Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.773279 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7bdbfc58b4-xzzxl"] Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.783971 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7bdbfc58b4-xzzxl"] Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.850897 4669 scope.go:117] "RemoveContainer" containerID="e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e" Dec 10 15:39:38 crc kubenswrapper[4669]: E1210 15:39:38.851664 4669 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e\": container with ID starting with e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e not found: ID does not exist" containerID="e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.851709 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e"} err="failed to get container status \"e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e\": rpc error: code = NotFound desc = could not find container \"e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e\": container with ID starting with e0ceb17dd8a26458fc409d028b9768aa69833f392a254e5feca20b4d2b72659e not found: ID does not exist" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.851740 4669 scope.go:117] "RemoveContainer" containerID="be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.851861 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-ff92t" Dec 10 15:39:38 crc kubenswrapper[4669]: E1210 15:39:38.853425 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259\": container with ID starting with be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259 not found: ID does not exist" containerID="be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.853452 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259"} err="failed to get container status \"be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259\": rpc error: code = NotFound desc = could not find container \"be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259\": container with ID starting with be4c6ae2f81a606f45fb907110eb463846c71e0b5d0b873367129fc0563f2259 not found: ID does not exist" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.853475 4669 scope.go:117] "RemoveContainer" containerID="153610108aceb7cf5653d3945d1e1b6277dce157b5b587f68baf54874374966e" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.885304 4669 scope.go:117] "RemoveContainer" containerID="22350a5525fc09baa7405238497ec6b59731b91188079f0f90fd57ce970f2172" Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.937472 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-dns-svc\") pod \"1256f642-992a-4d14-b552-5b471de3a211\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.937547 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-sb\") pod \"1256f642-992a-4d14-b552-5b471de3a211\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.937620 4669 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-config\") pod \"1256f642-992a-4d14-b552-5b471de3a211\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.937669 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-nb\") pod \"1256f642-992a-4d14-b552-5b471de3a211\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.937705 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7bxw2\" (UniqueName: \"kubernetes.io/projected/1256f642-992a-4d14-b552-5b471de3a211-kube-api-access-7bxw2\") pod \"1256f642-992a-4d14-b552-5b471de3a211\" (UID: \"1256f642-992a-4d14-b552-5b471de3a211\") " Dec 10 15:39:38 crc kubenswrapper[4669]: I1210 15:39:38.941800 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1256f642-992a-4d14-b552-5b471de3a211-kube-api-access-7bxw2" (OuterVolumeSpecName: "kube-api-access-7bxw2") pod "1256f642-992a-4d14-b552-5b471de3a211" (UID: "1256f642-992a-4d14-b552-5b471de3a211"). InnerVolumeSpecName "kube-api-access-7bxw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.008981 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1256f642-992a-4d14-b552-5b471de3a211" (UID: "1256f642-992a-4d14-b552-5b471de3a211"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.012695 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-config" (OuterVolumeSpecName: "config") pod "1256f642-992a-4d14-b552-5b471de3a211" (UID: "1256f642-992a-4d14-b552-5b471de3a211"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.017653 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1256f642-992a-4d14-b552-5b471de3a211" (UID: "1256f642-992a-4d14-b552-5b471de3a211"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.059774 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.059809 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.059821 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.059829 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7bxw2\" (UniqueName: \"kubernetes.io/projected/1256f642-992a-4d14-b552-5b471de3a211-kube-api-access-7bxw2\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.069362 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1256f642-992a-4d14-b552-5b471de3a211" (UID: "1256f642-992a-4d14-b552-5b471de3a211"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.161396 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1256f642-992a-4d14-b552-5b471de3a211-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.688373 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-869f779d85-ff92t" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.688368 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-869f779d85-ff92t" event={"ID":"1256f642-992a-4d14-b552-5b471de3a211","Type":"ContainerDied","Data":"38cf59f3eec2d765a60e079a2aee9c71ccf93b4b5ad1d79cdb6ebffa6dccffcb"} Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.688522 4669 scope.go:117] "RemoveContainer" containerID="03da3e73be53cf4fcb605a858943b1c99057f8fffdf64de833e1ec603f3d5207" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.692569 4669 generic.go:334] "Generic (PLEG): container finished" podID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerID="468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8" exitCode=0 Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.692654 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ca8e0593-5211-41fe-b79d-68e63a88f9da","Type":"ContainerDied","Data":"468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8"} Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.714582 4669 scope.go:117] "RemoveContainer" containerID="31636140f90747493905ceaa0c90f5613bffbd7e283db7ab54f637944582aac7" Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.747675 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-ff92t"] Dec 10 15:39:39 crc kubenswrapper[4669]: I1210 15:39:39.769986 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-869f779d85-ff92t"] Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.303375 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.409020 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1256f642-992a-4d14-b552-5b471de3a211" path="/var/lib/kubelet/pods/1256f642-992a-4d14-b552-5b471de3a211/volumes" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.409609 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="986eb035-7a6f-4395-9baa-fd984f4bb232" path="/var/lib/kubelet/pods/986eb035-7a6f-4395-9baa-fd984f4bb232/volumes" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.410165 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" path="/var/lib/kubelet/pods/db53e274-36cd-48a1-b907-ba4ed4e5d7e7/volumes" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.486172 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca8e0593-5211-41fe-b79d-68e63a88f9da-etc-machine-id\") pod \"ca8e0593-5211-41fe-b79d-68e63a88f9da\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.486316 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-scripts\") pod \"ca8e0593-5211-41fe-b79d-68e63a88f9da\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.486383 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data-custom\") pod 
\"ca8e0593-5211-41fe-b79d-68e63a88f9da\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.486439 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data\") pod \"ca8e0593-5211-41fe-b79d-68e63a88f9da\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.486479 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgrpm\" (UniqueName: \"kubernetes.io/projected/ca8e0593-5211-41fe-b79d-68e63a88f9da-kube-api-access-lgrpm\") pod \"ca8e0593-5211-41fe-b79d-68e63a88f9da\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.486570 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-combined-ca-bundle\") pod \"ca8e0593-5211-41fe-b79d-68e63a88f9da\" (UID: \"ca8e0593-5211-41fe-b79d-68e63a88f9da\") " Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.486890 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ca8e0593-5211-41fe-b79d-68e63a88f9da-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ca8e0593-5211-41fe-b79d-68e63a88f9da" (UID: "ca8e0593-5211-41fe-b79d-68e63a88f9da"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.493403 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-scripts" (OuterVolumeSpecName: "scripts") pod "ca8e0593-5211-41fe-b79d-68e63a88f9da" (UID: "ca8e0593-5211-41fe-b79d-68e63a88f9da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.494956 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ca8e0593-5211-41fe-b79d-68e63a88f9da-kube-api-access-lgrpm" (OuterVolumeSpecName: "kube-api-access-lgrpm") pod "ca8e0593-5211-41fe-b79d-68e63a88f9da" (UID: "ca8e0593-5211-41fe-b79d-68e63a88f9da"). InnerVolumeSpecName "kube-api-access-lgrpm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.504483 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ca8e0593-5211-41fe-b79d-68e63a88f9da" (UID: "ca8e0593-5211-41fe-b79d-68e63a88f9da"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.561760 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ca8e0593-5211-41fe-b79d-68e63a88f9da" (UID: "ca8e0593-5211-41fe-b79d-68e63a88f9da"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.588194 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.588420 4669 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.588510 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgrpm\" (UniqueName: \"kubernetes.io/projected/ca8e0593-5211-41fe-b79d-68e63a88f9da-kube-api-access-lgrpm\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.588564 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.588614 4669 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ca8e0593-5211-41fe-b79d-68e63a88f9da-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.600903 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data" (OuterVolumeSpecName: "config-data") pod "ca8e0593-5211-41fe-b79d-68e63a88f9da" (UID: "ca8e0593-5211-41fe-b79d-68e63a88f9da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.689676 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ca8e0593-5211-41fe-b79d-68e63a88f9da-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.708693 4669 generic.go:334] "Generic (PLEG): container finished" podID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerID="560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134" exitCode=0 Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.708745 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ca8e0593-5211-41fe-b79d-68e63a88f9da","Type":"ContainerDied","Data":"560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134"} Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.708758 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.713431 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ca8e0593-5211-41fe-b79d-68e63a88f9da","Type":"ContainerDied","Data":"5e844cbccdfcdfd9196afd1b349ce7fb26fc86837f05b3b25ffad9bf945d1555"} Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.713481 4669 scope.go:117] "RemoveContainer" containerID="468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.733908 4669 scope.go:117] "RemoveContainer" containerID="560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.749195 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.752516 4669 scope.go:117] "RemoveContainer" containerID="468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8" Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.752944 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8\": container with ID starting with 468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8 not found: ID does not exist" containerID="468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.752997 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8"} err="failed to get container status \"468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8\": rpc error: code = NotFound desc = could not find container \"468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8\": container with ID starting with 468f4099933e74ffedc368335bad05968bf50e499784c3644e63dd964f6d1cf8 not found: ID does not exist" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.753029 4669 scope.go:117] "RemoveContainer" containerID="560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134" Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.753459 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134\": container with ID starting with 560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134 not found: ID does not exist" containerID="560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.753479 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134"} err="failed to get container status \"560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134\": rpc error: code = NotFound desc = could not find container \"560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134\": container with ID starting with 560122927b25f97ac59563015bb52e9d999c8cf0286d7bac49819e07a1141134 not found: ID does not exist" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.761716 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 
15:39:40.777693 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.778096 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1256f642-992a-4d14-b552-5b471de3a211" containerName="dnsmasq-dns" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778121 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1256f642-992a-4d14-b552-5b471de3a211" containerName="dnsmasq-dns" Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.778143 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerName="probe" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778153 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerName="probe" Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.778391 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerName="neutron-httpd" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778401 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerName="neutron-httpd" Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.778411 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerName="barbican-api" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778416 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerName="barbican-api" Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.778429 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerName="neutron-api" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778435 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerName="neutron-api" Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.778446 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerName="barbican-api-log" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778453 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerName="barbican-api-log" Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.778465 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerName="cinder-scheduler" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778471 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerName="cinder-scheduler" Dec 10 15:39:40 crc kubenswrapper[4669]: E1210 15:39:40.778483 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1256f642-992a-4d14-b552-5b471de3a211" containerName="init" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778489 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1256f642-992a-4d14-b552-5b471de3a211" containerName="init" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778636 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerName="probe" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778649 4669 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerName="neutron-api" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778655 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="ca8e0593-5211-41fe-b79d-68e63a88f9da" containerName="cinder-scheduler" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778669 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="1256f642-992a-4d14-b552-5b471de3a211" containerName="dnsmasq-dns" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778679 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="986eb035-7a6f-4395-9baa-fd984f4bb232" containerName="neutron-httpd" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778688 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerName="barbican-api-log" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.778701 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="db53e274-36cd-48a1-b907-ba4ed4e5d7e7" containerName="barbican-api" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.779584 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.781784 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.804918 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.893673 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.893943 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.894139 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-scripts\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.894383 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvm4l\" (UniqueName: \"kubernetes.io/projected/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-kube-api-access-pvm4l\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.894509 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-config-data\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc 
kubenswrapper[4669]: I1210 15:39:40.894625 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.996081 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.996203 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-scripts\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.996256 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvm4l\" (UniqueName: \"kubernetes.io/projected/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-kube-api-access-pvm4l\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.996289 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-config-data\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.996333 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.996412 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:40 crc kubenswrapper[4669]: I1210 15:39:40.996506 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:41 crc kubenswrapper[4669]: I1210 15:39:41.000786 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-config-data\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:41 crc kubenswrapper[4669]: I1210 15:39:41.001440 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-combined-ca-bundle\") 
pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:41 crc kubenswrapper[4669]: I1210 15:39:41.002922 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:41 crc kubenswrapper[4669]: I1210 15:39:41.004474 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-scripts\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:41 crc kubenswrapper[4669]: I1210 15:39:41.025911 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvm4l\" (UniqueName: \"kubernetes.io/projected/62ef9f0e-8922-4e9f-a3d5-8c713471cc3e-kube-api-access-pvm4l\") pod \"cinder-scheduler-0\" (UID: \"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e\") " pod="openstack/cinder-scheduler-0" Dec 10 15:39:41 crc kubenswrapper[4669]: I1210 15:39:41.105706 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 10 15:39:41 crc kubenswrapper[4669]: I1210 15:39:41.537147 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 10 15:39:41 crc kubenswrapper[4669]: I1210 15:39:41.721526 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e","Type":"ContainerStarted","Data":"35cb3753b80603dea7cd552bc34be434c56270827a44903d46754e26098c813e"} Dec 10 15:39:42 crc kubenswrapper[4669]: I1210 15:39:42.410619 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ca8e0593-5211-41fe-b79d-68e63a88f9da" path="/var/lib/kubelet/pods/ca8e0593-5211-41fe-b79d-68e63a88f9da/volumes" Dec 10 15:39:42 crc kubenswrapper[4669]: I1210 15:39:42.737656 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e","Type":"ContainerStarted","Data":"b2a49bd862212c097c971b25dee47916036109a108c8ca67b1984c290d1afdb3"} Dec 10 15:39:44 crc kubenswrapper[4669]: I1210 15:39:44.180501 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"62ef9f0e-8922-4e9f-a3d5-8c713471cc3e","Type":"ContainerStarted","Data":"3486b38e987f41ad63f7b6c4101c6d2df4fe743011990c80aeb49b8849c0b64b"} Dec 10 15:39:44 crc kubenswrapper[4669]: I1210 15:39:44.223812 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.223793077 podStartE2EDuration="4.223793077s" podCreationTimestamp="2025-12-10 15:39:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:39:44.21979093 +0000 UTC m=+1158.136737557" watchObservedRunningTime="2025-12-10 15:39:44.223793077 +0000 UTC m=+1158.140739704" Dec 10 15:39:45 crc kubenswrapper[4669]: I1210 15:39:45.434765 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-6556f5d7cd-dgcb9" Dec 10 15:39:45 crc kubenswrapper[4669]: I1210 15:39:45.605331 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/placement-6556f5d7cd-dgcb9" Dec 10 15:39:46 crc kubenswrapper[4669]: I1210 15:39:46.106082 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 10 15:39:46 crc kubenswrapper[4669]: I1210 15:39:46.357176 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-659884ff9-rxsts" Dec 10 15:39:46 crc kubenswrapper[4669]: I1210 15:39:46.984976 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.117479 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.118460 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.120522 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.120946 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.121106 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-vlpvd" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.137573 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.254288 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6e062ab8-0c3e-4566-84d6-32dd4a604f41-openstack-config\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.254353 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wg98d\" (UniqueName: \"kubernetes.io/projected/6e062ab8-0c3e-4566-84d6-32dd4a604f41-kube-api-access-wg98d\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.254414 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6e062ab8-0c3e-4566-84d6-32dd4a604f41-openstack-config-secret\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.254438 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e062ab8-0c3e-4566-84d6-32dd4a604f41-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.356022 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wg98d\" (UniqueName: \"kubernetes.io/projected/6e062ab8-0c3e-4566-84d6-32dd4a604f41-kube-api-access-wg98d\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: 
I1210 15:39:48.356382 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6e062ab8-0c3e-4566-84d6-32dd4a604f41-openstack-config-secret\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.356410 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e062ab8-0c3e-4566-84d6-32dd4a604f41-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.356523 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6e062ab8-0c3e-4566-84d6-32dd4a604f41-openstack-config\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.357315 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/6e062ab8-0c3e-4566-84d6-32dd4a604f41-openstack-config\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.364343 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6e062ab8-0c3e-4566-84d6-32dd4a604f41-combined-ca-bundle\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.373329 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/6e062ab8-0c3e-4566-84d6-32dd4a604f41-openstack-config-secret\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.374019 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wg98d\" (UniqueName: \"kubernetes.io/projected/6e062ab8-0c3e-4566-84d6-32dd4a604f41-kube-api-access-wg98d\") pod \"openstackclient\" (UID: \"6e062ab8-0c3e-4566-84d6-32dd4a604f41\") " pod="openstack/openstackclient" Dec 10 15:39:48 crc kubenswrapper[4669]: I1210 15:39:48.435369 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 10 15:39:49 crc kubenswrapper[4669]: I1210 15:39:49.039939 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 10 15:39:49 crc kubenswrapper[4669]: I1210 15:39:49.222000 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6e062ab8-0c3e-4566-84d6-32dd4a604f41","Type":"ContainerStarted","Data":"082c6e66796ed4b4860fb1fdb24cc2d9e4754df1f8124dd55e80169991f930ef"} Dec 10 15:39:51 crc kubenswrapper[4669]: I1210 15:39:51.610444 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 10 15:39:57 crc kubenswrapper[4669]: I1210 15:39:57.846048 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:39:57 crc kubenswrapper[4669]: I1210 15:39:57.846680 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="ceilometer-central-agent" containerID="cri-o://a15461ba45d52eba788ae06c91731dc7cb3d7d680b82c39bc9479e48e6c3360d" gracePeriod=30 Dec 10 15:39:57 crc kubenswrapper[4669]: I1210 15:39:57.846731 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="proxy-httpd" containerID="cri-o://76d5969153bd21b9e87cb994ca9f91347dba1b9dd4dff79dba16d4c810553344" gracePeriod=30 Dec 10 15:39:57 crc kubenswrapper[4669]: I1210 15:39:57.846785 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="ceilometer-notification-agent" containerID="cri-o://3b7258bfbb07d947f4bf3df182f6c5ef90aabba4f4b922f5972f8af10fe969e7" gracePeriod=30 Dec 10 15:39:57 crc kubenswrapper[4669]: I1210 15:39:57.846735 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="sg-core" containerID="cri-o://093261ba1eddba73ae8d57da1762a81806d0b787c05e18dd28e9aa76e719ae86" gracePeriod=30 Dec 10 15:39:57 crc kubenswrapper[4669]: I1210 15:39:57.862259 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.151:3000/\": EOF" Dec 10 15:39:58 crc kubenswrapper[4669]: I1210 15:39:58.374133 4669 generic.go:334] "Generic (PLEG): container finished" podID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerID="76d5969153bd21b9e87cb994ca9f91347dba1b9dd4dff79dba16d4c810553344" exitCode=0 Dec 10 15:39:58 crc kubenswrapper[4669]: I1210 15:39:58.374178 4669 generic.go:334] "Generic (PLEG): container finished" podID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerID="093261ba1eddba73ae8d57da1762a81806d0b787c05e18dd28e9aa76e719ae86" exitCode=2 Dec 10 15:39:58 crc kubenswrapper[4669]: I1210 15:39:58.374206 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerDied","Data":"76d5969153bd21b9e87cb994ca9f91347dba1b9dd4dff79dba16d4c810553344"} Dec 10 15:39:58 crc kubenswrapper[4669]: I1210 15:39:58.374316 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerDied","Data":"093261ba1eddba73ae8d57da1762a81806d0b787c05e18dd28e9aa76e719ae86"} Dec 10 15:39:58 crc kubenswrapper[4669]: I1210 15:39:58.745138 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:39:58 crc kubenswrapper[4669]: I1210 15:39:58.745664 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:39:59 crc kubenswrapper[4669]: I1210 15:39:59.385036 4669 generic.go:334] "Generic (PLEG): container finished" podID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerID="a15461ba45d52eba788ae06c91731dc7cb3d7d680b82c39bc9479e48e6c3360d" exitCode=0 Dec 10 15:39:59 crc kubenswrapper[4669]: I1210 15:39:59.385077 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerDied","Data":"a15461ba45d52eba788ae06c91731dc7cb3d7d680b82c39bc9479e48e6c3360d"} Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.017275 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-gzt9w"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.018672 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.086591 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gzt9w"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.131117 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-j4vkk"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.132365 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.146733 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-operator-scripts\") pod \"nova-cell0-db-create-j4vkk\" (UID: \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\") " pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.147027 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb29z\" (UniqueName: \"kubernetes.io/projected/d4467f81-efe4-46f1-9bce-40afbc34252b-kube-api-access-tb29z\") pod \"nova-api-db-create-gzt9w\" (UID: \"d4467f81-efe4-46f1-9bce-40afbc34252b\") " pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.147126 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlrq2\" (UniqueName: \"kubernetes.io/projected/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-kube-api-access-rlrq2\") pod \"nova-cell0-db-create-j4vkk\" (UID: \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\") " pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.147297 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4467f81-efe4-46f1-9bce-40afbc34252b-operator-scripts\") pod \"nova-api-db-create-gzt9w\" (UID: \"d4467f81-efe4-46f1-9bce-40afbc34252b\") " pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.154843 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-j4vkk"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.227258 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-1075-account-create-update-fw8l2"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.228324 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.237935 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.248343 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlrq2\" (UniqueName: \"kubernetes.io/projected/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-kube-api-access-rlrq2\") pod \"nova-cell0-db-create-j4vkk\" (UID: \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\") " pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.250844 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4467f81-efe4-46f1-9bce-40afbc34252b-operator-scripts\") pod \"nova-api-db-create-gzt9w\" (UID: \"d4467f81-efe4-46f1-9bce-40afbc34252b\") " pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.250895 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-operator-scripts\") pod \"nova-cell0-db-create-j4vkk\" (UID: \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\") " pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.250993 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb29z\" (UniqueName: \"kubernetes.io/projected/d4467f81-efe4-46f1-9bce-40afbc34252b-kube-api-access-tb29z\") pod \"nova-api-db-create-gzt9w\" (UID: \"d4467f81-efe4-46f1-9bce-40afbc34252b\") " pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.251804 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4467f81-efe4-46f1-9bce-40afbc34252b-operator-scripts\") pod \"nova-api-db-create-gzt9w\" (UID: \"d4467f81-efe4-46f1-9bce-40afbc34252b\") " pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.257074 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-operator-scripts\") pod \"nova-cell0-db-create-j4vkk\" (UID: \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\") " pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.259907 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1075-account-create-update-fw8l2"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.289163 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlrq2\" (UniqueName: \"kubernetes.io/projected/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-kube-api-access-rlrq2\") pod \"nova-cell0-db-create-j4vkk\" (UID: \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\") " pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.298948 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb29z\" (UniqueName: \"kubernetes.io/projected/d4467f81-efe4-46f1-9bce-40afbc34252b-kube-api-access-tb29z\") pod \"nova-api-db-create-gzt9w\" (UID: \"d4467f81-efe4-46f1-9bce-40afbc34252b\") " pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:00 crc 
kubenswrapper[4669]: I1210 15:40:00.323650 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-9gbtr"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.326932 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9gbtr" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.338034 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.352022 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5bvz4\" (UniqueName: \"kubernetes.io/projected/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-kube-api-access-5bvz4\") pod \"nova-api-1075-account-create-update-fw8l2\" (UID: \"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\") " pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.352128 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-operator-scripts\") pod \"nova-api-1075-account-create-update-fw8l2\" (UID: \"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\") " pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.353309 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-9gbtr"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.416720 4669 generic.go:334] "Generic (PLEG): container finished" podID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerID="3b7258bfbb07d947f4bf3df182f6c5ef90aabba4f4b922f5972f8af10fe969e7" exitCode=0 Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.445146 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerDied","Data":"3b7258bfbb07d947f4bf3df182f6c5ef90aabba4f4b922f5972f8af10fe969e7"} Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.445189 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-1d59-account-create-update-zsxjn"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.446121 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.449540 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1d59-account-create-update-zsxjn"] Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.450542 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.457576 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.459021 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wljt6\" (UniqueName: \"kubernetes.io/projected/4a238de9-2038-4d87-a2c7-c3646e713865-kube-api-access-wljt6\") pod \"nova-cell0-1d59-account-create-update-zsxjn\" (UID: \"4a238de9-2038-4d87-a2c7-c3646e713865\") " pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.459085 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a238de9-2038-4d87-a2c7-c3646e713865-operator-scripts\") pod \"nova-cell0-1d59-account-create-update-zsxjn\" (UID: \"4a238de9-2038-4d87-a2c7-c3646e713865\") " pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.459368 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxx72\" (UniqueName: \"kubernetes.io/projected/33b1a847-d820-4001-ad7e-226c098d4953-kube-api-access-jxx72\") pod \"nova-cell1-db-create-9gbtr\" (UID: \"33b1a847-d820-4001-ad7e-226c098d4953\") " pod="openstack/nova-cell1-db-create-9gbtr" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.459424 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-operator-scripts\") pod \"nova-api-1075-account-create-update-fw8l2\" (UID: \"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\") " pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.459496 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33b1a847-d820-4001-ad7e-226c098d4953-operator-scripts\") pod \"nova-cell1-db-create-9gbtr\" (UID: \"33b1a847-d820-4001-ad7e-226c098d4953\") " pod="openstack/nova-cell1-db-create-9gbtr" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.459657 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5bvz4\" (UniqueName: \"kubernetes.io/projected/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-kube-api-access-5bvz4\") pod \"nova-api-1075-account-create-update-fw8l2\" (UID: \"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\") " pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.461050 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-operator-scripts\") pod \"nova-api-1075-account-create-update-fw8l2\" (UID: \"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\") " pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.479012 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5bvz4\" (UniqueName: \"kubernetes.io/projected/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-kube-api-access-5bvz4\") pod \"nova-api-1075-account-create-update-fw8l2\" (UID: 
\"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\") " pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.558083 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.561620 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wljt6\" (UniqueName: \"kubernetes.io/projected/4a238de9-2038-4d87-a2c7-c3646e713865-kube-api-access-wljt6\") pod \"nova-cell0-1d59-account-create-update-zsxjn\" (UID: \"4a238de9-2038-4d87-a2c7-c3646e713865\") " pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.561681 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a238de9-2038-4d87-a2c7-c3646e713865-operator-scripts\") pod \"nova-cell0-1d59-account-create-update-zsxjn\" (UID: \"4a238de9-2038-4d87-a2c7-c3646e713865\") " pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.561772 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxx72\" (UniqueName: \"kubernetes.io/projected/33b1a847-d820-4001-ad7e-226c098d4953-kube-api-access-jxx72\") pod \"nova-cell1-db-create-9gbtr\" (UID: \"33b1a847-d820-4001-ad7e-226c098d4953\") " pod="openstack/nova-cell1-db-create-9gbtr" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.561833 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33b1a847-d820-4001-ad7e-226c098d4953-operator-scripts\") pod \"nova-cell1-db-create-9gbtr\" (UID: \"33b1a847-d820-4001-ad7e-226c098d4953\") " pod="openstack/nova-cell1-db-create-9gbtr" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.562656 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33b1a847-d820-4001-ad7e-226c098d4953-operator-scripts\") pod \"nova-cell1-db-create-9gbtr\" (UID: \"33b1a847-d820-4001-ad7e-226c098d4953\") " pod="openstack/nova-cell1-db-create-9gbtr" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.563920 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a238de9-2038-4d87-a2c7-c3646e713865-operator-scripts\") pod \"nova-cell0-1d59-account-create-update-zsxjn\" (UID: \"4a238de9-2038-4d87-a2c7-c3646e713865\") " pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.591017 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxx72\" (UniqueName: \"kubernetes.io/projected/33b1a847-d820-4001-ad7e-226c098d4953-kube-api-access-jxx72\") pod \"nova-cell1-db-create-9gbtr\" (UID: \"33b1a847-d820-4001-ad7e-226c098d4953\") " pod="openstack/nova-cell1-db-create-9gbtr" Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.595745 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wljt6\" (UniqueName: \"kubernetes.io/projected/4a238de9-2038-4d87-a2c7-c3646e713865-kube-api-access-wljt6\") pod \"nova-cell0-1d59-account-create-update-zsxjn\" (UID: \"4a238de9-2038-4d87-a2c7-c3646e713865\") " pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" 
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.668912 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-f2c5-account-create-update-kjc5w"]
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.670692 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.674796 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.675635 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9gbtr"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.719719 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f2c5-account-create-update-kjc5w"]
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.790518 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1d59-account-create-update-zsxjn"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.866809 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-operator-scripts\") pod \"nova-cell1-f2c5-account-create-update-kjc5w\" (UID: \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\") " pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.866862 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf4lh\" (UniqueName: \"kubernetes.io/projected/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-kube-api-access-lf4lh\") pod \"nova-cell1-f2c5-account-create-update-kjc5w\" (UID: \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\") " pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.891154 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.151:3000/\": dial tcp 10.217.0.151:3000: connect: connection refused"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.968305 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-operator-scripts\") pod \"nova-cell1-f2c5-account-create-update-kjc5w\" (UID: \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\") " pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.968357 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf4lh\" (UniqueName: \"kubernetes.io/projected/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-kube-api-access-lf4lh\") pod \"nova-cell1-f2c5-account-create-update-kjc5w\" (UID: \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\") " pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.969378 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-operator-scripts\") pod \"nova-cell1-f2c5-account-create-update-kjc5w\" (UID: \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\") " pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.982898 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf4lh\" (UniqueName: \"kubernetes.io/projected/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-kube-api-access-lf4lh\") pod \"nova-cell1-f2c5-account-create-update-kjc5w\" (UID: \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\") " pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w"
Dec 10 15:40:00 crc kubenswrapper[4669]: I1210 15:40:00.996771 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w"
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.439879 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"6e062ab8-0c3e-4566-84d6-32dd4a604f41","Type":"ContainerStarted","Data":"cb6382f6cc4a6ed98ca4986028765d13778bee0283f72d9834ce14a47ce9da03"}
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.463383 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=1.644591976 podStartE2EDuration="14.463365088s" podCreationTimestamp="2025-12-10 15:39:48 +0000 UTC" firstStartedPulling="2025-12-10 15:39:49.069058622 +0000 UTC m=+1162.986005249" lastFinishedPulling="2025-12-10 15:40:01.887831734 +0000 UTC m=+1175.804778361" observedRunningTime="2025-12-10 15:40:02.460311634 +0000 UTC m=+1176.377258251" watchObservedRunningTime="2025-12-10 15:40:02.463365088 +0000 UTC m=+1176.380311705"
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.585270 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.664600 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-9gbtr"]
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.676960 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-1075-account-create-update-fw8l2"]
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.700627 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-scripts\") pod \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") "
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.701128 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-log-httpd\") pod \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") "
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.701181 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whjm4\" (UniqueName: \"kubernetes.io/projected/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-kube-api-access-whjm4\") pod \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") "
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.702190 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e23e8ea4-e9da-4b74-9e2d-a2208663ae54" (UID: "e23e8ea4-e9da-4b74-9e2d-a2208663ae54"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.702247 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-sg-core-conf-yaml\") pod \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") "
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.702318 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-combined-ca-bundle\") pod \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") "
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.702359 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-run-httpd\") pod \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") "
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.702439 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-config-data\") pod \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\" (UID: \"e23e8ea4-e9da-4b74-9e2d-a2208663ae54\") "
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.703337 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e23e8ea4-e9da-4b74-9e2d-a2208663ae54" (UID: "e23e8ea4-e9da-4b74-9e2d-a2208663ae54"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.704713 4669 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.704733 4669 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.711161 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-kube-api-access-whjm4" (OuterVolumeSpecName: "kube-api-access-whjm4") pod "e23e8ea4-e9da-4b74-9e2d-a2208663ae54" (UID: "e23e8ea4-e9da-4b74-9e2d-a2208663ae54"). InnerVolumeSpecName "kube-api-access-whjm4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.724583 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-scripts" (OuterVolumeSpecName: "scripts") pod "e23e8ea4-e9da-4b74-9e2d-a2208663ae54" (UID: "e23e8ea4-e9da-4b74-9e2d-a2208663ae54"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.806717 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-scripts\") on node \"crc\" DevicePath \"\""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.806740 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whjm4\" (UniqueName: \"kubernetes.io/projected/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-kube-api-access-whjm4\") on node \"crc\" DevicePath \"\""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.806854 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f2c5-account-create-update-kjc5w"]
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.900300 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e23e8ea4-e9da-4b74-9e2d-a2208663ae54" (UID: "e23e8ea4-e9da-4b74-9e2d-a2208663ae54"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.927010 4669 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.955306 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1d59-account-create-update-zsxjn"]
Dec 10 15:40:02 crc kubenswrapper[4669]: I1210 15:40:02.966041 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-gzt9w"]
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.061956 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e23e8ea4-e9da-4b74-9e2d-a2208663ae54" (UID: "e23e8ea4-e9da-4b74-9e2d-a2208663ae54"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.137586 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.145423 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-config-data" (OuterVolumeSpecName: "config-data") pod "e23e8ea4-e9da-4b74-9e2d-a2208663ae54" (UID: "e23e8ea4-e9da-4b74-9e2d-a2208663ae54"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.182801 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-j4vkk"]
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.239584 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e23e8ea4-e9da-4b74-9e2d-a2208663ae54-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.463554 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1075-account-create-update-fw8l2" event={"ID":"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab","Type":"ContainerStarted","Data":"f3ab1cbd5ffa9f90f9348b940a66444bad5cf37189854b2da2ef6615211539fb"}
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.463619 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1075-account-create-update-fw8l2" event={"ID":"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab","Type":"ContainerStarted","Data":"7ab4f2726cc34e14e6da23182a3253d85aa5c83af0cb69c2b71df9c9fa1477fe"}
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.480251 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-j4vkk" event={"ID":"6026f9fc-7f7c-45cd-b88e-3eb1735014b4","Type":"ContainerStarted","Data":"e9bc9de32c0be2e8b669dff46ed6b66325f9df09ea697bea613a434bc5d6556d"}
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.484653 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gzt9w" event={"ID":"d4467f81-efe4-46f1-9bce-40afbc34252b","Type":"ContainerStarted","Data":"5c8f559a9e9ef1fb06a24253f1aead09446d147f0577db5cf245949a56453a48"}
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.493703 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-1075-account-create-update-fw8l2" podStartSLOduration=3.493687184 podStartE2EDuration="3.493687184s" podCreationTimestamp="2025-12-10 15:40:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:40:03.49353007 +0000 UTC m=+1177.410476697" watchObservedRunningTime="2025-12-10 15:40:03.493687184 +0000 UTC m=+1177.410633821"
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.495291 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" event={"ID":"4a238de9-2038-4d87-a2c7-c3646e713865","Type":"ContainerStarted","Data":"f11da454c54f29bd76279a12c6b53757cdc7423b933ed5a021f5d50eeda27198"}
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.512811 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e23e8ea4-e9da-4b74-9e2d-a2208663ae54","Type":"ContainerDied","Data":"05004f7cf78829c958402d183d1fba3bfa9d3528d721b1c3a7f099c288b6c991"}
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.512871 4669 scope.go:117] "RemoveContainer" containerID="76d5969153bd21b9e87cb994ca9f91347dba1b9dd4dff79dba16d4c810553344"
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.512825 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.517037 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w" event={"ID":"d72e907f-0b02-41bf-a8b0-d28a2b7856aa","Type":"ContainerStarted","Data":"4e6ceed21dcf024bc1c85823b06d469b126606c0185ae04969bfdda19cd8f177"}
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.522543 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9gbtr" event={"ID":"33b1a847-d820-4001-ad7e-226c098d4953","Type":"ContainerStarted","Data":"a4f8b80be3d31881ec8721a84b6c5dcb979941e06be441aa24e0ba0f72e16d61"}
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.522593 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9gbtr" event={"ID":"33b1a847-d820-4001-ad7e-226c098d4953","Type":"ContainerStarted","Data":"8f5cdbab88756a34a48b827209b3ea0a93f4f9f4446e89339288f6cfc37f1b62"}
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.544676 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w" podStartSLOduration=3.544659427 podStartE2EDuration="3.544659427s" podCreationTimestamp="2025-12-10 15:40:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:40:03.537554484 +0000 UTC m=+1177.454501111" watchObservedRunningTime="2025-12-10 15:40:03.544659427 +0000 UTC m=+1177.461606054"
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.576005 4669 scope.go:117] "RemoveContainer" containerID="093261ba1eddba73ae8d57da1762a81806d0b787c05e18dd28e9aa76e719ae86"
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.589132 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-9gbtr" podStartSLOduration=3.58911426 podStartE2EDuration="3.58911426s" podCreationTimestamp="2025-12-10 15:40:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:40:03.575808886 +0000 UTC m=+1177.492755513" watchObservedRunningTime="2025-12-10 15:40:03.58911426 +0000 UTC m=+1177.506060887"
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.623564 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.637166 4669 scope.go:117] "RemoveContainer" containerID="3b7258bfbb07d947f4bf3df182f6c5ef90aabba4f4b922f5972f8af10fe969e7"
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.661852 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.677438 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 10 15:40:03 crc kubenswrapper[4669]: E1210 15:40:03.677817 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="ceilometer-notification-agent"
Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.677879 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="ceilometer-notification-agent"
Dec 10 15:40:03 crc kubenswrapper[4669]: E1210 15:40:03.677918 4669 cpu_manager.go:410] "RemoveStaleState: removing container"
podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="proxy-httpd" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.677927 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="proxy-httpd" Dec 10 15:40:03 crc kubenswrapper[4669]: E1210 15:40:03.677950 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="ceilometer-central-agent" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.677958 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="ceilometer-central-agent" Dec 10 15:40:03 crc kubenswrapper[4669]: E1210 15:40:03.677974 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="sg-core" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.677981 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="sg-core" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.678172 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="ceilometer-notification-agent" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.678192 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="proxy-httpd" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.678202 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="ceilometer-central-agent" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.678227 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" containerName="sg-core" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.678399 4669 scope.go:117] "RemoveContainer" containerID="a15461ba45d52eba788ae06c91731dc7cb3d7d680b82c39bc9479e48e6c3360d" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.683684 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.687606 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.700182 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.707761 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.774303 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-scripts\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.774363 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.774396 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-log-httpd\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.774423 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-run-httpd\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.774646 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-config-data\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.774807 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.774890 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wwrp2\" (UniqueName: \"kubernetes.io/projected/1e848197-4d84-4fa5-9369-414c21693296-kube-api-access-wwrp2\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.876323 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 
15:40:03.876392 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wwrp2\" (UniqueName: \"kubernetes.io/projected/1e848197-4d84-4fa5-9369-414c21693296-kube-api-access-wwrp2\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.876422 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-scripts\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.876444 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.876466 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-log-httpd\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.876493 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-run-httpd\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.876541 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-config-data\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.877699 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-log-httpd\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.877698 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-run-httpd\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.882694 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.885795 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-config-data\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.887826 4669 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-scripts\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.893032 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:03 crc kubenswrapper[4669]: I1210 15:40:03.898034 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wwrp2\" (UniqueName: \"kubernetes.io/projected/1e848197-4d84-4fa5-9369-414c21693296-kube-api-access-wwrp2\") pod \"ceilometer-0\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " pod="openstack/ceilometer-0" Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.085630 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.413436 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e23e8ea4-e9da-4b74-9e2d-a2208663ae54" path="/var/lib/kubelet/pods/e23e8ea4-e9da-4b74-9e2d-a2208663ae54/volumes" Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.530406 4669 generic.go:334] "Generic (PLEG): container finished" podID="d4467f81-efe4-46f1-9bce-40afbc34252b" containerID="6422c87b8c518be5ee4af3a204d158dffc5d8d9d0311aaae38e275a3311aedd0" exitCode=0 Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.530478 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gzt9w" event={"ID":"d4467f81-efe4-46f1-9bce-40afbc34252b","Type":"ContainerDied","Data":"6422c87b8c518be5ee4af3a204d158dffc5d8d9d0311aaae38e275a3311aedd0"} Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.532549 4669 generic.go:334] "Generic (PLEG): container finished" podID="4a238de9-2038-4d87-a2c7-c3646e713865" containerID="47fd761d0633d50b27190e91a9ba7763bb582cae823d088556b911575cf24659" exitCode=0 Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.532595 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" event={"ID":"4a238de9-2038-4d87-a2c7-c3646e713865","Type":"ContainerDied","Data":"47fd761d0633d50b27190e91a9ba7763bb582cae823d088556b911575cf24659"} Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.535705 4669 generic.go:334] "Generic (PLEG): container finished" podID="d72e907f-0b02-41bf-a8b0-d28a2b7856aa" containerID="454788ad57eef6fe87288ed5b332d6773f1557e5690635f347abb5460bb89b4f" exitCode=0 Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.535794 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w" event={"ID":"d72e907f-0b02-41bf-a8b0-d28a2b7856aa","Type":"ContainerDied","Data":"454788ad57eef6fe87288ed5b332d6773f1557e5690635f347abb5460bb89b4f"} Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.537607 4669 generic.go:334] "Generic (PLEG): container finished" podID="33b1a847-d820-4001-ad7e-226c098d4953" containerID="a4f8b80be3d31881ec8721a84b6c5dcb979941e06be441aa24e0ba0f72e16d61" exitCode=0 Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.537657 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9gbtr" 
event={"ID":"33b1a847-d820-4001-ad7e-226c098d4953","Type":"ContainerDied","Data":"a4f8b80be3d31881ec8721a84b6c5dcb979941e06be441aa24e0ba0f72e16d61"} Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.539008 4669 generic.go:334] "Generic (PLEG): container finished" podID="b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab" containerID="f3ab1cbd5ffa9f90f9348b940a66444bad5cf37189854b2da2ef6615211539fb" exitCode=0 Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.539040 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1075-account-create-update-fw8l2" event={"ID":"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab","Type":"ContainerDied","Data":"f3ab1cbd5ffa9f90f9348b940a66444bad5cf37189854b2da2ef6615211539fb"} Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.540681 4669 generic.go:334] "Generic (PLEG): container finished" podID="6026f9fc-7f7c-45cd-b88e-3eb1735014b4" containerID="9e77d8aa1ddbf9803b85bd3bd0a12384ea3fbc59111ccd8c7a0bff35637b0ef1" exitCode=0 Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.540729 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-j4vkk" event={"ID":"6026f9fc-7f7c-45cd-b88e-3eb1735014b4","Type":"ContainerDied","Data":"9e77d8aa1ddbf9803b85bd3bd0a12384ea3fbc59111ccd8c7a0bff35637b0ef1"} Dec 10 15:40:04 crc kubenswrapper[4669]: I1210 15:40:04.618009 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:05 crc kubenswrapper[4669]: I1210 15:40:05.569320 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerStarted","Data":"47bab598eb7779872e805c1fbeca090e74bbb5f4d8aa898ac5032610d2ff7e23"} Dec 10 15:40:05 crc kubenswrapper[4669]: I1210 15:40:05.992202 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.012501 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a238de9-2038-4d87-a2c7-c3646e713865-operator-scripts\") pod \"4a238de9-2038-4d87-a2c7-c3646e713865\" (UID: \"4a238de9-2038-4d87-a2c7-c3646e713865\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.012558 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wljt6\" (UniqueName: \"kubernetes.io/projected/4a238de9-2038-4d87-a2c7-c3646e713865-kube-api-access-wljt6\") pod \"4a238de9-2038-4d87-a2c7-c3646e713865\" (UID: \"4a238de9-2038-4d87-a2c7-c3646e713865\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.014069 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a238de9-2038-4d87-a2c7-c3646e713865-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4a238de9-2038-4d87-a2c7-c3646e713865" (UID: "4a238de9-2038-4d87-a2c7-c3646e713865"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.055749 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a238de9-2038-4d87-a2c7-c3646e713865-kube-api-access-wljt6" (OuterVolumeSpecName: "kube-api-access-wljt6") pod "4a238de9-2038-4d87-a2c7-c3646e713865" (UID: "4a238de9-2038-4d87-a2c7-c3646e713865"). InnerVolumeSpecName "kube-api-access-wljt6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.114762 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4a238de9-2038-4d87-a2c7-c3646e713865-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.114804 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wljt6\" (UniqueName: \"kubernetes.io/projected/4a238de9-2038-4d87-a2c7-c3646e713865-kube-api-access-wljt6\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.203409 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.215592 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4467f81-efe4-46f1-9bce-40afbc34252b-operator-scripts\") pod \"d4467f81-efe4-46f1-9bce-40afbc34252b\" (UID: \"d4467f81-efe4-46f1-9bce-40afbc34252b\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.215636 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb29z\" (UniqueName: \"kubernetes.io/projected/d4467f81-efe4-46f1-9bce-40afbc34252b-kube-api-access-tb29z\") pod \"d4467f81-efe4-46f1-9bce-40afbc34252b\" (UID: \"d4467f81-efe4-46f1-9bce-40afbc34252b\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.216421 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d4467f81-efe4-46f1-9bce-40afbc34252b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d4467f81-efe4-46f1-9bce-40afbc34252b" (UID: "d4467f81-efe4-46f1-9bce-40afbc34252b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.220639 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.237980 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4467f81-efe4-46f1-9bce-40afbc34252b-kube-api-access-tb29z" (OuterVolumeSpecName: "kube-api-access-tb29z") pod "d4467f81-efe4-46f1-9bce-40afbc34252b" (UID: "d4467f81-efe4-46f1-9bce-40afbc34252b"). InnerVolumeSpecName "kube-api-access-tb29z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.245228 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9gbtr" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.245321 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.255684 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.319692 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-operator-scripts\") pod \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\" (UID: \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.319780 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33b1a847-d820-4001-ad7e-226c098d4953-operator-scripts\") pod \"33b1a847-d820-4001-ad7e-226c098d4953\" (UID: \"33b1a847-d820-4001-ad7e-226c098d4953\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.319898 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-operator-scripts\") pod \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\" (UID: \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.319929 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lf4lh\" (UniqueName: \"kubernetes.io/projected/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-kube-api-access-lf4lh\") pod \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\" (UID: \"d72e907f-0b02-41bf-a8b0-d28a2b7856aa\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.319990 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlrq2\" (UniqueName: \"kubernetes.io/projected/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-kube-api-access-rlrq2\") pod \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\" (UID: \"6026f9fc-7f7c-45cd-b88e-3eb1735014b4\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.320034 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxx72\" (UniqueName: \"kubernetes.io/projected/33b1a847-d820-4001-ad7e-226c098d4953-kube-api-access-jxx72\") pod \"33b1a847-d820-4001-ad7e-226c098d4953\" (UID: \"33b1a847-d820-4001-ad7e-226c098d4953\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.320487 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d4467f81-efe4-46f1-9bce-40afbc34252b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.320510 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb29z\" (UniqueName: \"kubernetes.io/projected/d4467f81-efe4-46f1-9bce-40afbc34252b-kube-api-access-tb29z\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.321630 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d72e907f-0b02-41bf-a8b0-d28a2b7856aa" (UID: "d72e907f-0b02-41bf-a8b0-d28a2b7856aa"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.323588 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/33b1a847-d820-4001-ad7e-226c098d4953-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "33b1a847-d820-4001-ad7e-226c098d4953" (UID: "33b1a847-d820-4001-ad7e-226c098d4953"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.328395 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/33b1a847-d820-4001-ad7e-226c098d4953-kube-api-access-jxx72" (OuterVolumeSpecName: "kube-api-access-jxx72") pod "33b1a847-d820-4001-ad7e-226c098d4953" (UID: "33b1a847-d820-4001-ad7e-226c098d4953"). InnerVolumeSpecName "kube-api-access-jxx72". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.328665 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6026f9fc-7f7c-45cd-b88e-3eb1735014b4" (UID: "6026f9fc-7f7c-45cd-b88e-3eb1735014b4"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.328907 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.330436 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-kube-api-access-lf4lh" (OuterVolumeSpecName: "kube-api-access-lf4lh") pod "d72e907f-0b02-41bf-a8b0-d28a2b7856aa" (UID: "d72e907f-0b02-41bf-a8b0-d28a2b7856aa"). InnerVolumeSpecName "kube-api-access-lf4lh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.338797 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-kube-api-access-rlrq2" (OuterVolumeSpecName: "kube-api-access-rlrq2") pod "6026f9fc-7f7c-45cd-b88e-3eb1735014b4" (UID: "6026f9fc-7f7c-45cd-b88e-3eb1735014b4"). InnerVolumeSpecName "kube-api-access-rlrq2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.421907 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5bvz4\" (UniqueName: \"kubernetes.io/projected/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-kube-api-access-5bvz4\") pod \"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\" (UID: \"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.422314 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-operator-scripts\") pod \"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\" (UID: \"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab\") " Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.427935 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.427978 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/33b1a847-d820-4001-ad7e-226c098d4953-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.428000 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.428010 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lf4lh\" (UniqueName: \"kubernetes.io/projected/d72e907f-0b02-41bf-a8b0-d28a2b7856aa-kube-api-access-lf4lh\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.428022 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlrq2\" (UniqueName: \"kubernetes.io/projected/6026f9fc-7f7c-45cd-b88e-3eb1735014b4-kube-api-access-rlrq2\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.428032 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxx72\" (UniqueName: \"kubernetes.io/projected/33b1a847-d820-4001-ad7e-226c098d4953-kube-api-access-jxx72\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.428611 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-kube-api-access-5bvz4" (OuterVolumeSpecName: "kube-api-access-5bvz4") pod "b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab" (UID: "b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab"). InnerVolumeSpecName "kube-api-access-5bvz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.430538 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab" (UID: "b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.529537 4669 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.529871 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5bvz4\" (UniqueName: \"kubernetes.io/projected/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab-kube-api-access-5bvz4\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.622947 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-9gbtr" event={"ID":"33b1a847-d820-4001-ad7e-226c098d4953","Type":"ContainerDied","Data":"8f5cdbab88756a34a48b827209b3ea0a93f4f9f4446e89339288f6cfc37f1b62"} Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.622991 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f5cdbab88756a34a48b827209b3ea0a93f4f9f4446e89339288f6cfc37f1b62" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.623074 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-9gbtr" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.648922 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-1075-account-create-update-fw8l2" event={"ID":"b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab","Type":"ContainerDied","Data":"7ab4f2726cc34e14e6da23182a3253d85aa5c83af0cb69c2b71df9c9fa1477fe"} Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.648963 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ab4f2726cc34e14e6da23182a3253d85aa5c83af0cb69c2b71df9c9fa1477fe" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.649036 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-1075-account-create-update-fw8l2" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.654951 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-j4vkk" event={"ID":"6026f9fc-7f7c-45cd-b88e-3eb1735014b4","Type":"ContainerDied","Data":"e9bc9de32c0be2e8b669dff46ed6b66325f9df09ea697bea613a434bc5d6556d"} Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.654999 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9bc9de32c0be2e8b669dff46ed6b66325f9df09ea697bea613a434bc5d6556d" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.655077 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-j4vkk" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.661340 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-gzt9w" event={"ID":"d4467f81-efe4-46f1-9bce-40afbc34252b","Type":"ContainerDied","Data":"5c8f559a9e9ef1fb06a24253f1aead09446d147f0577db5cf245949a56453a48"} Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.661523 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5c8f559a9e9ef1fb06a24253f1aead09446d147f0577db5cf245949a56453a48" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.661365 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-gzt9w" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.664161 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" event={"ID":"4a238de9-2038-4d87-a2c7-c3646e713865","Type":"ContainerDied","Data":"f11da454c54f29bd76279a12c6b53757cdc7423b933ed5a021f5d50eeda27198"} Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.664188 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f11da454c54f29bd76279a12c6b53757cdc7423b933ed5a021f5d50eeda27198" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.664173 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1d59-account-create-update-zsxjn" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.665530 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w" event={"ID":"d72e907f-0b02-41bf-a8b0-d28a2b7856aa","Type":"ContainerDied","Data":"4e6ceed21dcf024bc1c85823b06d469b126606c0185ae04969bfdda19cd8f177"} Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.665607 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e6ceed21dcf024bc1c85823b06d469b126606c0185ae04969bfdda19cd8f177" Dec 10 15:40:06 crc kubenswrapper[4669]: I1210 15:40:06.665627 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f2c5-account-create-update-kjc5w" Dec 10 15:40:07 crc kubenswrapper[4669]: I1210 15:40:07.675831 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerStarted","Data":"7bbab919e6056930e656b73ef3a064e5434ac0c2283195b3bfc3f30caae0b839"} Dec 10 15:40:08 crc kubenswrapper[4669]: I1210 15:40:08.686650 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerStarted","Data":"53d98d39332ab5f9e923d7788e5b71d0f7936a26fbd54dfd60afd435b7e12179"} Dec 10 15:40:09 crc kubenswrapper[4669]: I1210 15:40:09.699321 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerStarted","Data":"fd3cf4d1ec9a358759a449810fc92bcedbae8e6fb358295ba19535c868948d56"} Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.710091 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerStarted","Data":"7fddb993937af17086038a0d92639eab290752bf51b3c98594894b59d6a81d78"} Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.710566 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.710423 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="sg-core" containerID="cri-o://fd3cf4d1ec9a358759a449810fc92bcedbae8e6fb358295ba19535c868948d56" gracePeriod=30 Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.710337 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="proxy-httpd" 
containerID="cri-o://7fddb993937af17086038a0d92639eab290752bf51b3c98594894b59d6a81d78" gracePeriod=30 Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.710468 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="ceilometer-notification-agent" containerID="cri-o://53d98d39332ab5f9e923d7788e5b71d0f7936a26fbd54dfd60afd435b7e12179" gracePeriod=30 Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.710963 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="ceilometer-central-agent" containerID="cri-o://7bbab919e6056930e656b73ef3a064e5434ac0c2283195b3bfc3f30caae0b839" gracePeriod=30 Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.751763 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.331363357 podStartE2EDuration="7.751736545s" podCreationTimestamp="2025-12-10 15:40:03 +0000 UTC" firstStartedPulling="2025-12-10 15:40:04.634624116 +0000 UTC m=+1178.551570743" lastFinishedPulling="2025-12-10 15:40:10.054997304 +0000 UTC m=+1183.971943931" observedRunningTime="2025-12-10 15:40:10.743434463 +0000 UTC m=+1184.660381110" watchObservedRunningTime="2025-12-10 15:40:10.751736545 +0000 UTC m=+1184.668683172" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.830685 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wvfpg"] Dec 10 15:40:10 crc kubenswrapper[4669]: E1210 15:40:10.831141 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6026f9fc-7f7c-45cd-b88e-3eb1735014b4" containerName="mariadb-database-create" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831165 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="6026f9fc-7f7c-45cd-b88e-3eb1735014b4" containerName="mariadb-database-create" Dec 10 15:40:10 crc kubenswrapper[4669]: E1210 15:40:10.831190 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4467f81-efe4-46f1-9bce-40afbc34252b" containerName="mariadb-database-create" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831199 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4467f81-efe4-46f1-9bce-40afbc34252b" containerName="mariadb-database-create" Dec 10 15:40:10 crc kubenswrapper[4669]: E1210 15:40:10.831237 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="33b1a847-d820-4001-ad7e-226c098d4953" containerName="mariadb-database-create" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831249 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="33b1a847-d820-4001-ad7e-226c098d4953" containerName="mariadb-database-create" Dec 10 15:40:10 crc kubenswrapper[4669]: E1210 15:40:10.831261 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d72e907f-0b02-41bf-a8b0-d28a2b7856aa" containerName="mariadb-account-create-update" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831269 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="d72e907f-0b02-41bf-a8b0-d28a2b7856aa" containerName="mariadb-account-create-update" Dec 10 15:40:10 crc kubenswrapper[4669]: E1210 15:40:10.831293 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a238de9-2038-4d87-a2c7-c3646e713865" containerName="mariadb-account-create-update" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831302 4669 
state_mem.go:107] "Deleted CPUSet assignment" podUID="4a238de9-2038-4d87-a2c7-c3646e713865" containerName="mariadb-account-create-update" Dec 10 15:40:10 crc kubenswrapper[4669]: E1210 15:40:10.831334 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab" containerName="mariadb-account-create-update" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831343 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab" containerName="mariadb-account-create-update" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831554 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab" containerName="mariadb-account-create-update" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831573 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a238de9-2038-4d87-a2c7-c3646e713865" containerName="mariadb-account-create-update" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831590 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4467f81-efe4-46f1-9bce-40afbc34252b" containerName="mariadb-database-create" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831600 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="33b1a847-d820-4001-ad7e-226c098d4953" containerName="mariadb-database-create" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831616 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="6026f9fc-7f7c-45cd-b88e-3eb1735014b4" containerName="mariadb-database-create" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.831628 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="d72e907f-0b02-41bf-a8b0-d28a2b7856aa" containerName="mariadb-account-create-update" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.836046 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.841580 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.842068 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-kj7vc" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.842371 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.868072 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wvfpg"] Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.930096 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-config-data\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.930451 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.930556 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-scripts\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:10 crc kubenswrapper[4669]: I1210 15:40:10.930718 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cprps\" (UniqueName: \"kubernetes.io/projected/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-kube-api-access-cprps\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.032900 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-config-data\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.033032 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.033058 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-scripts\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: 
\"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.033992 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cprps\" (UniqueName: \"kubernetes.io/projected/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-kube-api-access-cprps\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.039333 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-scripts\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.040824 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-config-data\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.056643 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.066738 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cprps\" (UniqueName: \"kubernetes.io/projected/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-kube-api-access-cprps\") pod \"nova-cell0-conductor-db-sync-wvfpg\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.222464 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.721503 4669 generic.go:334] "Generic (PLEG): container finished" podID="1e848197-4d84-4fa5-9369-414c21693296" containerID="7fddb993937af17086038a0d92639eab290752bf51b3c98594894b59d6a81d78" exitCode=0 Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.721768 4669 generic.go:334] "Generic (PLEG): container finished" podID="1e848197-4d84-4fa5-9369-414c21693296" containerID="fd3cf4d1ec9a358759a449810fc92bcedbae8e6fb358295ba19535c868948d56" exitCode=2 Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.721775 4669 generic.go:334] "Generic (PLEG): container finished" podID="1e848197-4d84-4fa5-9369-414c21693296" containerID="53d98d39332ab5f9e923d7788e5b71d0f7936a26fbd54dfd60afd435b7e12179" exitCode=0 Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.721581 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerDied","Data":"7fddb993937af17086038a0d92639eab290752bf51b3c98594894b59d6a81d78"} Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.721819 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerDied","Data":"fd3cf4d1ec9a358759a449810fc92bcedbae8e6fb358295ba19535c868948d56"} Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.721838 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerDied","Data":"53d98d39332ab5f9e923d7788e5b71d0f7936a26fbd54dfd60afd435b7e12179"} Dec 10 15:40:11 crc kubenswrapper[4669]: I1210 15:40:11.783051 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wvfpg"] Dec 10 15:40:12 crc kubenswrapper[4669]: I1210 15:40:12.732332 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wvfpg" event={"ID":"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35","Type":"ContainerStarted","Data":"2fc718f93c2b69b82b433cdd2f0c42fe19cd13e2f1188e98c86f786eb5f5eb22"} Dec 10 15:40:15 crc kubenswrapper[4669]: I1210 15:40:15.758713 4669 generic.go:334] "Generic (PLEG): container finished" podID="1e848197-4d84-4fa5-9369-414c21693296" containerID="7bbab919e6056930e656b73ef3a064e5434ac0c2283195b3bfc3f30caae0b839" exitCode=0 Dec 10 15:40:15 crc kubenswrapper[4669]: I1210 15:40:15.759227 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerDied","Data":"7bbab919e6056930e656b73ef3a064e5434ac0c2283195b3bfc3f30caae0b839"} Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.253721 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.306158 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-sg-core-conf-yaml\") pod \"1e848197-4d84-4fa5-9369-414c21693296\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.306247 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-log-httpd\") pod \"1e848197-4d84-4fa5-9369-414c21693296\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.306289 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wwrp2\" (UniqueName: \"kubernetes.io/projected/1e848197-4d84-4fa5-9369-414c21693296-kube-api-access-wwrp2\") pod \"1e848197-4d84-4fa5-9369-414c21693296\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.306680 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-combined-ca-bundle\") pod \"1e848197-4d84-4fa5-9369-414c21693296\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.306967 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "1e848197-4d84-4fa5-9369-414c21693296" (UID: "1e848197-4d84-4fa5-9369-414c21693296"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.307014 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-scripts\") pod \"1e848197-4d84-4fa5-9369-414c21693296\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.307112 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-config-data\") pod \"1e848197-4d84-4fa5-9369-414c21693296\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.307141 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-run-httpd\") pod \"1e848197-4d84-4fa5-9369-414c21693296\" (UID: \"1e848197-4d84-4fa5-9369-414c21693296\") " Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.307989 4669 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.308129 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "1e848197-4d84-4fa5-9369-414c21693296" (UID: "1e848197-4d84-4fa5-9369-414c21693296"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.313125 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e848197-4d84-4fa5-9369-414c21693296-kube-api-access-wwrp2" (OuterVolumeSpecName: "kube-api-access-wwrp2") pod "1e848197-4d84-4fa5-9369-414c21693296" (UID: "1e848197-4d84-4fa5-9369-414c21693296"). InnerVolumeSpecName "kube-api-access-wwrp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.313564 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-scripts" (OuterVolumeSpecName: "scripts") pod "1e848197-4d84-4fa5-9369-414c21693296" (UID: "1e848197-4d84-4fa5-9369-414c21693296"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.351714 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "1e848197-4d84-4fa5-9369-414c21693296" (UID: "1e848197-4d84-4fa5-9369-414c21693296"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.398761 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1e848197-4d84-4fa5-9369-414c21693296" (UID: "1e848197-4d84-4fa5-9369-414c21693296"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.409427 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.409597 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.409664 4669 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/1e848197-4d84-4fa5-9369-414c21693296-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.409720 4669 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.409812 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wwrp2\" (UniqueName: \"kubernetes.io/projected/1e848197-4d84-4fa5-9369-414c21693296-kube-api-access-wwrp2\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.433970 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-config-data" (OuterVolumeSpecName: "config-data") pod "1e848197-4d84-4fa5-9369-414c21693296" (UID: "1e848197-4d84-4fa5-9369-414c21693296"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.511916 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e848197-4d84-4fa5-9369-414c21693296-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.807439 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wvfpg" event={"ID":"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35","Type":"ContainerStarted","Data":"ec8a44b8eb017eaceb9c4f296e7f2b645a986f540fefda606a917f5a35a81199"} Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.810717 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"1e848197-4d84-4fa5-9369-414c21693296","Type":"ContainerDied","Data":"47bab598eb7779872e805c1fbeca090e74bbb5f4d8aa898ac5032610d2ff7e23"} Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.810764 4669 scope.go:117] "RemoveContainer" containerID="7fddb993937af17086038a0d92639eab290752bf51b3c98594894b59d6a81d78" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.810900 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.831864 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-wvfpg" podStartSLOduration=2.577873506 podStartE2EDuration="10.831838723s" podCreationTimestamp="2025-12-10 15:40:10 +0000 UTC" firstStartedPulling="2025-12-10 15:40:11.787565455 +0000 UTC m=+1185.704512092" lastFinishedPulling="2025-12-10 15:40:20.041530682 +0000 UTC m=+1193.958477309" observedRunningTime="2025-12-10 15:40:20.822826644 +0000 UTC m=+1194.739773271" watchObservedRunningTime="2025-12-10 15:40:20.831838723 +0000 UTC m=+1194.748785360" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.854114 4669 scope.go:117] "RemoveContainer" containerID="fd3cf4d1ec9a358759a449810fc92bcedbae8e6fb358295ba19535c868948d56" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.859005 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.881095 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.891917 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:20 crc kubenswrapper[4669]: E1210 15:40:20.892287 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="ceilometer-notification-agent" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.892308 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="ceilometer-notification-agent" Dec 10 15:40:20 crc kubenswrapper[4669]: E1210 15:40:20.892325 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="ceilometer-central-agent" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.892332 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="ceilometer-central-agent" Dec 10 15:40:20 crc kubenswrapper[4669]: E1210 15:40:20.892371 4669 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="proxy-httpd" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.892378 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="proxy-httpd" Dec 10 15:40:20 crc kubenswrapper[4669]: E1210 15:40:20.892388 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="sg-core" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.892394 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="sg-core" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.892569 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="ceilometer-central-agent" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.892585 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="sg-core" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.892603 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="proxy-httpd" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.892611 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e848197-4d84-4fa5-9369-414c21693296" containerName="ceilometer-notification-agent" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.896319 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.898981 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.899506 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.902256 4669 scope.go:117] "RemoveContainer" containerID="53d98d39332ab5f9e923d7788e5b71d0f7936a26fbd54dfd60afd435b7e12179" Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.907677 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:20 crc kubenswrapper[4669]: I1210 15:40:20.948232 4669 scope.go:117] "RemoveContainer" containerID="7bbab919e6056930e656b73ef3a064e5434ac0c2283195b3bfc3f30caae0b839" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.018402 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-config-data\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.018855 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-scripts\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.018914 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-log-httpd\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " 
pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.018982 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.019023 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.019128 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-run-httpd\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.019165 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzpqt\" (UniqueName: \"kubernetes.io/projected/53d10856-fd2d-4030-a435-becd420536d9-kube-api-access-nzpqt\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.120787 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-run-httpd\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.120840 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzpqt\" (UniqueName: \"kubernetes.io/projected/53d10856-fd2d-4030-a435-becd420536d9-kube-api-access-nzpqt\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.120926 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-config-data\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.121018 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-scripts\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.121086 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-log-httpd\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.121124 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.121150 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.121603 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-run-httpd\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.121871 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-log-httpd\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.127119 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.127476 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-config-data\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.128828 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-scripts\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.134126 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.137558 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzpqt\" (UniqueName: \"kubernetes.io/projected/53d10856-fd2d-4030-a435-becd420536d9-kube-api-access-nzpqt\") pod \"ceilometer-0\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.216888 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.694657 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:21 crc kubenswrapper[4669]: W1210 15:40:21.697663 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53d10856_fd2d_4030_a435_becd420536d9.slice/crio-3a021f78c1937725e159a0887f12d56b6b106a055cef9d107eaa5681e24a2016 WatchSource:0}: Error finding container 3a021f78c1937725e159a0887f12d56b6b106a055cef9d107eaa5681e24a2016: Status 404 returned error can't find the container with id 3a021f78c1937725e159a0887f12d56b6b106a055cef9d107eaa5681e24a2016 Dec 10 15:40:21 crc kubenswrapper[4669]: I1210 15:40:21.821379 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerStarted","Data":"3a021f78c1937725e159a0887f12d56b6b106a055cef9d107eaa5681e24a2016"} Dec 10 15:40:22 crc kubenswrapper[4669]: I1210 15:40:22.415363 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e848197-4d84-4fa5-9369-414c21693296" path="/var/lib/kubelet/pods/1e848197-4d84-4fa5-9369-414c21693296/volumes" Dec 10 15:40:22 crc kubenswrapper[4669]: I1210 15:40:22.833694 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerStarted","Data":"29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0"} Dec 10 15:40:23 crc kubenswrapper[4669]: I1210 15:40:23.845461 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerStarted","Data":"e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d"} Dec 10 15:40:24 crc kubenswrapper[4669]: I1210 15:40:24.854722 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerStarted","Data":"aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa"} Dec 10 15:40:25 crc kubenswrapper[4669]: I1210 15:40:25.333267 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:26 crc kubenswrapper[4669]: I1210 15:40:26.889867 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerStarted","Data":"863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8"} Dec 10 15:40:26 crc kubenswrapper[4669]: I1210 15:40:26.890263 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 15:40:26 crc kubenswrapper[4669]: I1210 15:40:26.890695 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="proxy-httpd" containerID="cri-o://863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8" gracePeriod=30 Dec 10 15:40:26 crc kubenswrapper[4669]: I1210 15:40:26.890970 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="sg-core" containerID="cri-o://aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa" gracePeriod=30 Dec 10 15:40:26 crc kubenswrapper[4669]: I1210 15:40:26.891041 4669 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="ceilometer-notification-agent" containerID="cri-o://e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d" gracePeriod=30 Dec 10 15:40:26 crc kubenswrapper[4669]: I1210 15:40:26.913648 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="ceilometer-central-agent" containerID="cri-o://29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0" gracePeriod=30 Dec 10 15:40:26 crc kubenswrapper[4669]: I1210 15:40:26.923590 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.734146251 podStartE2EDuration="6.923571383s" podCreationTimestamp="2025-12-10 15:40:20 +0000 UTC" firstStartedPulling="2025-12-10 15:40:21.700271381 +0000 UTC m=+1195.617218018" lastFinishedPulling="2025-12-10 15:40:25.889696523 +0000 UTC m=+1199.806643150" observedRunningTime="2025-12-10 15:40:26.919966926 +0000 UTC m=+1200.836913563" watchObservedRunningTime="2025-12-10 15:40:26.923571383 +0000 UTC m=+1200.840518010" Dec 10 15:40:27 crc kubenswrapper[4669]: E1210 15:40:27.168074 4669 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53d10856_fd2d_4030_a435_becd420536d9.slice/crio-conmon-aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53d10856_fd2d_4030_a435_becd420536d9.slice/crio-863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8.scope\": RecentStats: unable to find data in memory cache]" Dec 10 15:40:27 crc kubenswrapper[4669]: I1210 15:40:27.905559 4669 generic.go:334] "Generic (PLEG): container finished" podID="53d10856-fd2d-4030-a435-becd420536d9" containerID="863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8" exitCode=0 Dec 10 15:40:27 crc kubenswrapper[4669]: I1210 15:40:27.905625 4669 generic.go:334] "Generic (PLEG): container finished" podID="53d10856-fd2d-4030-a435-becd420536d9" containerID="aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa" exitCode=2 Dec 10 15:40:27 crc kubenswrapper[4669]: I1210 15:40:27.905648 4669 generic.go:334] "Generic (PLEG): container finished" podID="53d10856-fd2d-4030-a435-becd420536d9" containerID="e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d" exitCode=0 Dec 10 15:40:27 crc kubenswrapper[4669]: I1210 15:40:27.905706 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerDied","Data":"863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8"} Dec 10 15:40:27 crc kubenswrapper[4669]: I1210 15:40:27.905753 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerDied","Data":"aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa"} Dec 10 15:40:27 crc kubenswrapper[4669]: I1210 15:40:27.905781 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerDied","Data":"e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d"} Dec 10 15:40:28 crc kubenswrapper[4669]: I1210 15:40:28.744652 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:40:28 crc kubenswrapper[4669]: I1210 15:40:28.745042 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.819589 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.927319 4669 generic.go:334] "Generic (PLEG): container finished" podID="53d10856-fd2d-4030-a435-becd420536d9" containerID="29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0" exitCode=0 Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.927387 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerDied","Data":"29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0"} Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.927420 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"53d10856-fd2d-4030-a435-becd420536d9","Type":"ContainerDied","Data":"3a021f78c1937725e159a0887f12d56b6b106a055cef9d107eaa5681e24a2016"} Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.927443 4669 scope.go:117] "RemoveContainer" containerID="863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8" Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.927624 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.977704 4669 scope.go:117] "RemoveContainer" containerID="aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa" Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.991052 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-scripts\") pod \"53d10856-fd2d-4030-a435-becd420536d9\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.991134 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-combined-ca-bundle\") pod \"53d10856-fd2d-4030-a435-becd420536d9\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.991171 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-config-data\") pod \"53d10856-fd2d-4030-a435-becd420536d9\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.991193 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-log-httpd\") pod \"53d10856-fd2d-4030-a435-becd420536d9\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.991242 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzpqt\" (UniqueName: \"kubernetes.io/projected/53d10856-fd2d-4030-a435-becd420536d9-kube-api-access-nzpqt\") pod \"53d10856-fd2d-4030-a435-becd420536d9\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.991273 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-sg-core-conf-yaml\") pod \"53d10856-fd2d-4030-a435-becd420536d9\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.991302 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-run-httpd\") pod \"53d10856-fd2d-4030-a435-becd420536d9\" (UID: \"53d10856-fd2d-4030-a435-becd420536d9\") " Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.991804 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "53d10856-fd2d-4030-a435-becd420536d9" (UID: "53d10856-fd2d-4030-a435-becd420536d9"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.992127 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "53d10856-fd2d-4030-a435-becd420536d9" (UID: "53d10856-fd2d-4030-a435-becd420536d9"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:40:29 crc kubenswrapper[4669]: I1210 15:40:29.999506 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-scripts" (OuterVolumeSpecName: "scripts") pod "53d10856-fd2d-4030-a435-becd420536d9" (UID: "53d10856-fd2d-4030-a435-becd420536d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.003017 4669 scope.go:117] "RemoveContainer" containerID="e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.018187 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53d10856-fd2d-4030-a435-becd420536d9-kube-api-access-nzpqt" (OuterVolumeSpecName: "kube-api-access-nzpqt") pod "53d10856-fd2d-4030-a435-becd420536d9" (UID: "53d10856-fd2d-4030-a435-becd420536d9"). InnerVolumeSpecName "kube-api-access-nzpqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.055623 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "53d10856-fd2d-4030-a435-becd420536d9" (UID: "53d10856-fd2d-4030-a435-becd420536d9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.092618 4669 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.092658 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzpqt\" (UniqueName: \"kubernetes.io/projected/53d10856-fd2d-4030-a435-becd420536d9-kube-api-access-nzpqt\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.092669 4669 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.092677 4669 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/53d10856-fd2d-4030-a435-becd420536d9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.092685 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.102999 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-config-data" (OuterVolumeSpecName: "config-data") pod "53d10856-fd2d-4030-a435-becd420536d9" (UID: "53d10856-fd2d-4030-a435-becd420536d9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.117874 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "53d10856-fd2d-4030-a435-becd420536d9" (UID: "53d10856-fd2d-4030-a435-becd420536d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.131102 4669 scope.go:117] "RemoveContainer" containerID="29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.151279 4669 scope.go:117] "RemoveContainer" containerID="863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8" Dec 10 15:40:30 crc kubenswrapper[4669]: E1210 15:40:30.151800 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8\": container with ID starting with 863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8 not found: ID does not exist" containerID="863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.151839 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8"} err="failed to get container status \"863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8\": rpc error: code = NotFound desc = could not find container \"863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8\": container with ID starting with 863681b23e6f7be7023a2cf4f940fb2e13ba151f73d94e88952adb495197a1f8 not found: ID does not exist" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.151860 4669 scope.go:117] "RemoveContainer" containerID="aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa" Dec 10 15:40:30 crc kubenswrapper[4669]: E1210 15:40:30.152165 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa\": container with ID starting with aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa not found: ID does not exist" containerID="aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.152232 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa"} err="failed to get container status \"aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa\": rpc error: code = NotFound desc = could not find container \"aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa\": container with ID starting with aaa48a6f9b79eb710a913a19ff123d3a3d9138fc4a0a8869340dab0cae255dfa not found: ID does not exist" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.152269 4669 scope.go:117] "RemoveContainer" containerID="e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d" Dec 10 15:40:30 crc kubenswrapper[4669]: E1210 15:40:30.152786 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d\": container with ID starting with e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d not found: ID does not exist" containerID="e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.152920 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d"} err="failed to get container status \"e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d\": rpc error: code = NotFound desc = could not find container \"e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d\": container with ID starting with e25da9c59f20b3c7bf8d29c4a12ac75f54352837379b3f139caecc60ecf5ab4d not found: ID does not exist" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.153026 4669 scope.go:117] "RemoveContainer" containerID="29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0" Dec 10 15:40:30 crc kubenswrapper[4669]: E1210 15:40:30.153481 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0\": container with ID starting with 29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0 not found: ID does not exist" containerID="29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.153507 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0"} err="failed to get container status \"29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0\": rpc error: code = NotFound desc = could not find container \"29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0\": container with ID starting with 29039329cd1b5c1b0604554c1e518be191594ee0c0e4bc62f9c7df28884e1aa0 not found: ID does not exist" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.194097 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.194813 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/53d10856-fd2d-4030-a435-becd420536d9-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.261667 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.273834 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.305533 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:30 crc kubenswrapper[4669]: E1210 15:40:30.319964 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="sg-core" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.320006 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="sg-core" Dec 10 15:40:30 crc kubenswrapper[4669]: E1210 15:40:30.320024 4669 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="ceilometer-notification-agent" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.320031 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="ceilometer-notification-agent" Dec 10 15:40:30 crc kubenswrapper[4669]: E1210 15:40:30.320051 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="proxy-httpd" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.320059 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="proxy-httpd" Dec 10 15:40:30 crc kubenswrapper[4669]: E1210 15:40:30.320096 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="ceilometer-central-agent" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.320102 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="ceilometer-central-agent" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.320538 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="sg-core" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.320574 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="ceilometer-notification-agent" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.320588 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="ceilometer-central-agent" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.320607 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="53d10856-fd2d-4030-a435-becd420536d9" containerName="proxy-httpd" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.326738 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.330519 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.333894 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.366164 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.409846 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="53d10856-fd2d-4030-a435-becd420536d9" path="/var/lib/kubelet/pods/53d10856-fd2d-4030-a435-becd420536d9/volumes" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.507143 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-run-httpd\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.507339 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.507489 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-config-data\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.507664 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-log-httpd\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.507792 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftdmw\" (UniqueName: \"kubernetes.io/projected/cfb2ed8e-311c-4857-8065-57d98d5c7031-kube-api-access-ftdmw\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.507945 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-scripts\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.508258 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.610293 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.610352 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-run-httpd\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.610396 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.610432 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-config-data\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.610487 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-log-httpd\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.610513 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftdmw\" (UniqueName: \"kubernetes.io/projected/cfb2ed8e-311c-4857-8065-57d98d5c7031-kube-api-access-ftdmw\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.610577 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-scripts\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.611825 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-run-httpd\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.612151 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-log-httpd\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.613804 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.616905 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-config-data\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.629321 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-scripts\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.630251 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.630251 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftdmw\" (UniqueName: \"kubernetes.io/projected/cfb2ed8e-311c-4857-8065-57d98d5c7031-kube-api-access-ftdmw\") pod \"ceilometer-0\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " pod="openstack/ceilometer-0" Dec 10 15:40:30 crc kubenswrapper[4669]: I1210 15:40:30.659793 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:40:31 crc kubenswrapper[4669]: W1210 15:40:31.156346 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfb2ed8e_311c_4857_8065_57d98d5c7031.slice/crio-db2987efbb73ef3ab9353f33fa1ac70c6468ed72860a74906df9b9165653e253 WatchSource:0}: Error finding container db2987efbb73ef3ab9353f33fa1ac70c6468ed72860a74906df9b9165653e253: Status 404 returned error can't find the container with id db2987efbb73ef3ab9353f33fa1ac70c6468ed72860a74906df9b9165653e253 Dec 10 15:40:31 crc kubenswrapper[4669]: I1210 15:40:31.156819 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:40:31 crc kubenswrapper[4669]: I1210 15:40:31.950907 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerStarted","Data":"db2987efbb73ef3ab9353f33fa1ac70c6468ed72860a74906df9b9165653e253"} Dec 10 15:40:32 crc kubenswrapper[4669]: I1210 15:40:32.967032 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerStarted","Data":"3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b"} Dec 10 15:40:32 crc kubenswrapper[4669]: I1210 15:40:32.970060 4669 generic.go:334] "Generic (PLEG): container finished" podID="2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35" containerID="ec8a44b8eb017eaceb9c4f296e7f2b645a986f540fefda606a917f5a35a81199" exitCode=0 Dec 10 15:40:32 crc kubenswrapper[4669]: I1210 15:40:32.970168 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wvfpg" event={"ID":"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35","Type":"ContainerDied","Data":"ec8a44b8eb017eaceb9c4f296e7f2b645a986f540fefda606a917f5a35a81199"} Dec 10 15:40:33 crc kubenswrapper[4669]: I1210 15:40:33.982377 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerStarted","Data":"7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3"} 
Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.295132 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.380639 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-config-data\") pod \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.380849 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-scripts\") pod \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.381176 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cprps\" (UniqueName: \"kubernetes.io/projected/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-kube-api-access-cprps\") pod \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.381257 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-combined-ca-bundle\") pod \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\" (UID: \"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35\") " Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.397909 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-scripts" (OuterVolumeSpecName: "scripts") pod "2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35" (UID: "2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.398040 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-kube-api-access-cprps" (OuterVolumeSpecName: "kube-api-access-cprps") pod "2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35" (UID: "2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35"). InnerVolumeSpecName "kube-api-access-cprps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.419376 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35" (UID: "2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.451464 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-config-data" (OuterVolumeSpecName: "config-data") pod "2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35" (UID: "2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.483014 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.483049 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cprps\" (UniqueName: \"kubernetes.io/projected/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-kube-api-access-cprps\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.483061 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:34 crc kubenswrapper[4669]: I1210 15:40:34.483071 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.008124 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-wvfpg" event={"ID":"2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35","Type":"ContainerDied","Data":"2fc718f93c2b69b82b433cdd2f0c42fe19cd13e2f1188e98c86f786eb5f5eb22"} Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.008537 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2fc718f93c2b69b82b433cdd2f0c42fe19cd13e2f1188e98c86f786eb5f5eb22" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.008638 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-wvfpg" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.108283 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 10 15:40:35 crc kubenswrapper[4669]: E1210 15:40:35.108931 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35" containerName="nova-cell0-conductor-db-sync" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.109011 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35" containerName="nova-cell0-conductor-db-sync" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.109333 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35" containerName="nova-cell0-conductor-db-sync" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.109964 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.113739 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.113893 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-kj7vc" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.129602 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.198995 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dffe45fb-00df-4b42-8982-fc996a9707dc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"dffe45fb-00df-4b42-8982-fc996a9707dc\") " pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.199352 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t72tw\" (UniqueName: \"kubernetes.io/projected/dffe45fb-00df-4b42-8982-fc996a9707dc-kube-api-access-t72tw\") pod \"nova-cell0-conductor-0\" (UID: \"dffe45fb-00df-4b42-8982-fc996a9707dc\") " pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.199468 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dffe45fb-00df-4b42-8982-fc996a9707dc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"dffe45fb-00df-4b42-8982-fc996a9707dc\") " pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.301360 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dffe45fb-00df-4b42-8982-fc996a9707dc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"dffe45fb-00df-4b42-8982-fc996a9707dc\") " pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.301475 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dffe45fb-00df-4b42-8982-fc996a9707dc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"dffe45fb-00df-4b42-8982-fc996a9707dc\") " pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.301604 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t72tw\" (UniqueName: \"kubernetes.io/projected/dffe45fb-00df-4b42-8982-fc996a9707dc-kube-api-access-t72tw\") pod \"nova-cell0-conductor-0\" (UID: \"dffe45fb-00df-4b42-8982-fc996a9707dc\") " pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.304854 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dffe45fb-00df-4b42-8982-fc996a9707dc-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"dffe45fb-00df-4b42-8982-fc996a9707dc\") " pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.305630 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dffe45fb-00df-4b42-8982-fc996a9707dc-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"dffe45fb-00df-4b42-8982-fc996a9707dc\") " pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.316918 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t72tw\" (UniqueName: \"kubernetes.io/projected/dffe45fb-00df-4b42-8982-fc996a9707dc-kube-api-access-t72tw\") pod \"nova-cell0-conductor-0\" (UID: \"dffe45fb-00df-4b42-8982-fc996a9707dc\") " pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.425687 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:35 crc kubenswrapper[4669]: I1210 15:40:35.927014 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 10 15:40:36 crc kubenswrapper[4669]: I1210 15:40:36.017503 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"dffe45fb-00df-4b42-8982-fc996a9707dc","Type":"ContainerStarted","Data":"7abcb32691f7938498187da5f01c3ec69fe888a4c0f57f514a444b0978a2e040"} Dec 10 15:40:36 crc kubenswrapper[4669]: I1210 15:40:36.021788 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerStarted","Data":"3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150"} Dec 10 15:40:37 crc kubenswrapper[4669]: I1210 15:40:37.032768 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerStarted","Data":"3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45"} Dec 10 15:40:37 crc kubenswrapper[4669]: I1210 15:40:37.035717 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"dffe45fb-00df-4b42-8982-fc996a9707dc","Type":"ContainerStarted","Data":"67c227cb752d57cdedcc9a70d55be75493df36dcef8b1d6724b503e88989f99b"} Dec 10 15:40:37 crc kubenswrapper[4669]: I1210 15:40:37.035855 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 15:40:37 crc kubenswrapper[4669]: I1210 15:40:37.035922 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 10 15:40:37 crc kubenswrapper[4669]: I1210 15:40:37.059374 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.663367035 podStartE2EDuration="7.059349659s" podCreationTimestamp="2025-12-10 15:40:30 +0000 UTC" firstStartedPulling="2025-12-10 15:40:31.158230068 +0000 UTC m=+1205.075176695" lastFinishedPulling="2025-12-10 15:40:36.554212682 +0000 UTC m=+1210.471159319" observedRunningTime="2025-12-10 15:40:37.052079742 +0000 UTC m=+1210.969026369" watchObservedRunningTime="2025-12-10 15:40:37.059349659 +0000 UTC m=+1210.976296286" Dec 10 15:40:37 crc kubenswrapper[4669]: I1210 15:40:37.077304 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.077280147 podStartE2EDuration="2.077280147s" podCreationTimestamp="2025-12-10 15:40:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:40:37.072365417 +0000 UTC m=+1210.989312044" watchObservedRunningTime="2025-12-10 15:40:37.077280147 +0000 UTC m=+1210.994226774" Dec 10 15:40:45 crc 
Dec 10 15:40:45 crc kubenswrapper[4669]: I1210 15:40:45.922101 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-szq9q"]
Dec 10 15:40:45 crc kubenswrapper[4669]: I1210 15:40:45.923158 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-szq9q"
Dec 10 15:40:45 crc kubenswrapper[4669]: I1210 15:40:45.927959 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data"
Dec 10 15:40:45 crc kubenswrapper[4669]: I1210 15:40:45.928089 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts"
Dec 10 15:40:45 crc kubenswrapper[4669]: I1210 15:40:45.938558 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-szq9q"]
Dec 10 15:40:45 crc kubenswrapper[4669]: I1210 15:40:45.993138 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhs85\" (UniqueName: \"kubernetes.io/projected/d87be1f9-462a-4c6e-b252-11f57a2efe0f-kube-api-access-lhs85\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q"
Dec 10 15:40:45 crc kubenswrapper[4669]: I1210 15:40:45.993195 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q"
Dec 10 15:40:45 crc kubenswrapper[4669]: I1210 15:40:45.993304 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-scripts\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q"
Dec 10 15:40:45 crc kubenswrapper[4669]: I1210 15:40:45.993342 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-config-data\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.086341 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"]
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.087670 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.091746 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.098174 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-scripts\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.098244 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-config-data\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.098331 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhs85\" (UniqueName: \"kubernetes.io/projected/d87be1f9-462a-4c6e-b252-11f57a2efe0f-kube-api-access-lhs85\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.098357 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.101035 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.104769 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.109993 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-config-data\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.110344 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-scripts\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.180108 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhs85\" (UniqueName: \"kubernetes.io/projected/d87be1f9-462a-4c6e-b252-11f57a2efe0f-kube-api-access-lhs85\") pod \"nova-cell0-cell-mapping-szq9q\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.204169 4669 
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.204540 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qn984\" (UniqueName: \"kubernetes.io/projected/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-kube-api-access-qn984\") pod \"nova-scheduler-0\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " pod="openstack/nova-scheduler-0"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.204606 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-config-data\") pod \"nova-scheduler-0\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " pod="openstack/nova-scheduler-0"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.239575 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-szq9q"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.260200 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.261934 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.266635 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.284731 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.305685 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-logs\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.305754 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7qwv\" (UniqueName: \"kubernetes.io/projected/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-kube-api-access-z7qwv\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.305792 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qn984\" (UniqueName: \"kubernetes.io/projected/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-kube-api-access-qn984\") pod \"nova-scheduler-0\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " pod="openstack/nova-scheduler-0"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.305858 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-config-data\") pod \"nova-scheduler-0\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " pod="openstack/nova-scheduler-0"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.305878 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0"
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.305931 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-config-data\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.305960 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " pod="openstack/nova-scheduler-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.317429 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-config-data\") pod \"nova-scheduler-0\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " pod="openstack/nova-scheduler-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.334650 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " pod="openstack/nova-scheduler-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.382365 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.383825 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.386275 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.394522 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qn984\" (UniqueName: \"kubernetes.io/projected/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-kube-api-access-qn984\") pod \"nova-scheduler-0\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " pod="openstack/nova-scheduler-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.407907 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7qwv\" (UniqueName: \"kubernetes.io/projected/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-kube-api-access-z7qwv\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.408005 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.408046 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-config-data\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.408084 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-logs\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.408642 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-logs\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.426146 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-config-data\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.428131 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.460230 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.460265 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.461122 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.461192 4669 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.466863 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7qwv\" (UniqueName: \"kubernetes.io/projected/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-kube-api-access-z7qwv\") pod \"nova-api-0\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " pod="openstack/nova-api-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.467103 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.509514 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.513976 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45rtw\" (UniqueName: \"kubernetes.io/projected/deddf829-5b45-4f9e-ad77-943c90e4ae60-kube-api-access-45rtw\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.514242 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deddf829-5b45-4f9e-ad77-943c90e4ae60-logs\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.516700 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.516850 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.517021 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sbzst\" (UniqueName: \"kubernetes.io/projected/34e172e1-eb30-4db4-aa0a-89e5816aa04f-kube-api-access-sbzst\") pod \"nova-cell1-novncproxy-0\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.517112 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-config-data\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.533097 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.588426 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-ngxq7"] Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.590010 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.601400 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-ngxq7"] Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618628 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618671 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x66x\" (UniqueName: \"kubernetes.io/projected/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-kube-api-access-7x66x\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618692 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deddf829-5b45-4f9e-ad77-943c90e4ae60-logs\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618722 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618739 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618786 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sbzst\" (UniqueName: \"kubernetes.io/projected/34e172e1-eb30-4db4-aa0a-89e5816aa04f-kube-api-access-sbzst\") pod \"nova-cell1-novncproxy-0\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618806 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-config-data\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618857 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618879 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618921 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45rtw\" (UniqueName: \"kubernetes.io/projected/deddf829-5b45-4f9e-ad77-943c90e4ae60-kube-api-access-45rtw\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618959 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.618979 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-config\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.619775 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deddf829-5b45-4f9e-ad77-943c90e4ae60-logs\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.634085 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.635804 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-config-data\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.646095 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45rtw\" (UniqueName: \"kubernetes.io/projected/deddf829-5b45-4f9e-ad77-943c90e4ae60-kube-api-access-45rtw\") pod \"nova-metadata-0\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.648646 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.652113 4669 operation_generator.go:637] "MountVolume.SetUp 
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.658751 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sbzst\" (UniqueName: \"kubernetes.io/projected/34e172e1-eb30-4db4-aa0a-89e5816aa04f-kube-api-access-sbzst\") pod \"nova-cell1-novncproxy-0\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " pod="openstack/nova-cell1-novncproxy-0"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.718656 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.720094 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.721667 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-dns-svc\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.721810 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.721847 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-config\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.721869 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.721898 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x66x\" (UniqueName: \"kubernetes.io/projected/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-kube-api-access-7x66x\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.724761 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-config\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7"
Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.728419 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7"
"MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-nb\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.728914 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-sb\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.741638 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x66x\" (UniqueName: \"kubernetes.io/projected/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-kube-api-access-7x66x\") pod \"dnsmasq-dns-8b8cf6657-ngxq7\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.753673 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.793662 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:46 crc kubenswrapper[4669]: I1210 15:40:46.941171 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.061463 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-szq9q"] Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.208236 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-szq9q" event={"ID":"d87be1f9-462a-4c6e-b252-11f57a2efe0f","Type":"ContainerStarted","Data":"208dd5f5f4dcab5d9ac05a24e50c4223340d4d3b8c74eaea91023c40f49c3800"} Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.243797 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.397285 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:47 crc kubenswrapper[4669]: W1210 15:40:47.401915 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeb5eafc3_8f37_46c1_bd60_77cefa7bcc8c.slice/crio-0f6e3227926372e8ca9ae486fab2fd86f95b3fefaa3c3456aaad7a071609a597 WatchSource:0}: Error finding container 0f6e3227926372e8ca9ae486fab2fd86f95b3fefaa3c3456aaad7a071609a597: Status 404 returned error can't find the container with id 0f6e3227926372e8ca9ae486fab2fd86f95b3fefaa3c3456aaad7a071609a597 Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.430605 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.465894 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6hngc"] Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.478999 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.489139 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6hngc"] Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.489345 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.489502 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.543276 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.543420 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-scripts\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.543450 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9tcb\" (UniqueName: \"kubernetes.io/projected/a2bdf613-d862-4e15-a915-32b1789bc868-kube-api-access-z9tcb\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.543612 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-config-data\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.562143 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.647276 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.647357 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-scripts\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.647409 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9tcb\" (UniqueName: \"kubernetes.io/projected/a2bdf613-d862-4e15-a915-32b1789bc868-kube-api-access-z9tcb\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: 
\"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.647476 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-config-data\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.652629 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.664052 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9tcb\" (UniqueName: \"kubernetes.io/projected/a2bdf613-d862-4e15-a915-32b1789bc868-kube-api-access-z9tcb\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.664350 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-scripts\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.665886 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-config-data\") pod \"nova-cell1-conductor-db-sync-6hngc\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.695428 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-ngxq7"] Dec 10 15:40:47 crc kubenswrapper[4669]: I1210 15:40:47.830114 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.218134 4669 generic.go:334] "Generic (PLEG): container finished" podID="e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" containerID="c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d" exitCode=0 Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.218274 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" event={"ID":"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780","Type":"ContainerDied","Data":"c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d"} Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.218317 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" event={"ID":"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780","Type":"ContainerStarted","Data":"5a0c957155e0b6154c64d19413aac14430b55696b707610bbf0bc2a96e6ca593"} Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.220386 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"34e172e1-eb30-4db4-aa0a-89e5816aa04f","Type":"ContainerStarted","Data":"d87f401d4311d596c5bbdb3d7c98fc6adc1f906345c97cadfd901cd29a351cf4"} Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.222205 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-szq9q" event={"ID":"d87be1f9-462a-4c6e-b252-11f57a2efe0f","Type":"ContainerStarted","Data":"27b23a0805fdeb0f1bb404825704f44ea46821aa6e595ab90994396db123b2d7"} Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.225500 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8a20aa0e-8001-4663-9bb0-a1c65b2450a5","Type":"ContainerStarted","Data":"2365f558b21fff41b2c460ca346234a1933b6c01cbca1ad7375b68abfb9c08a1"} Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.229794 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deddf829-5b45-4f9e-ad77-943c90e4ae60","Type":"ContainerStarted","Data":"e66d112a90ab7e02be0d9694b7076d405d8523ca6369e5221c2535f4873910d0"} Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.231923 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c","Type":"ContainerStarted","Data":"0f6e3227926372e8ca9ae486fab2fd86f95b3fefaa3c3456aaad7a071609a597"} Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.330590 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-szq9q" podStartSLOduration=3.330563834 podStartE2EDuration="3.330563834s" podCreationTimestamp="2025-12-10 15:40:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:40:48.301192538 +0000 UTC m=+1222.218139165" watchObservedRunningTime="2025-12-10 15:40:48.330563834 +0000 UTC m=+1222.247510461" Dec 10 15:40:48 crc kubenswrapper[4669]: I1210 15:40:48.342364 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6hngc"] Dec 10 15:40:48 crc kubenswrapper[4669]: W1210 15:40:48.369374 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2bdf613_d862_4e15_a915_32b1789bc868.slice/crio-6b4a1f525f2a18e772d52a8dae7bc57856cf8c41e70a97b8664336d454a626e7 WatchSource:0}: 
Dec 10 15:40:49 crc kubenswrapper[4669]: I1210 15:40:49.243598 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6hngc" event={"ID":"a2bdf613-d862-4e15-a915-32b1789bc868","Type":"ContainerStarted","Data":"1d8f70c1aba1cfbeb11b1d0af575830e40902737de62f9d8598ad9901254cf47"}
Dec 10 15:40:49 crc kubenswrapper[4669]: I1210 15:40:49.244120 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6hngc" event={"ID":"a2bdf613-d862-4e15-a915-32b1789bc868","Type":"ContainerStarted","Data":"6b4a1f525f2a18e772d52a8dae7bc57856cf8c41e70a97b8664336d454a626e7"}
Dec 10 15:40:49 crc kubenswrapper[4669]: I1210 15:40:49.248126 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" event={"ID":"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780","Type":"ContainerStarted","Data":"9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4"}
Dec 10 15:40:49 crc kubenswrapper[4669]: I1210 15:40:49.248622 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7"
Dec 10 15:40:49 crc kubenswrapper[4669]: I1210 15:40:49.290100 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" podStartSLOduration=3.290084793 podStartE2EDuration="3.290084793s" podCreationTimestamp="2025-12-10 15:40:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:40:49.289436837 +0000 UTC m=+1223.206383464" watchObservedRunningTime="2025-12-10 15:40:49.290084793 +0000 UTC m=+1223.207031420"
Dec 10 15:40:49 crc kubenswrapper[4669]: I1210 15:40:49.294086 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-6hngc" podStartSLOduration=2.294075401 podStartE2EDuration="2.294075401s" podCreationTimestamp="2025-12-10 15:40:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:40:49.264002577 +0000 UTC m=+1223.180949194" watchObservedRunningTime="2025-12-10 15:40:49.294075401 +0000 UTC m=+1223.211022028"
Dec 10 15:40:50 crc kubenswrapper[4669]: I1210 15:40:50.315324 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 10 15:40:50 crc kubenswrapper[4669]: I1210 15:40:50.328895 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.279838 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"34e172e1-eb30-4db4-aa0a-89e5816aa04f","Type":"ContainerStarted","Data":"f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2"}
Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.281600 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8a20aa0e-8001-4663-9bb0-a1c65b2450a5","Type":"ContainerStarted","Data":"75e0bf266f468918f4c7265326b9f377b24b3e615bab4788dcd15991afdad1f6"}
Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.279926 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="34e172e1-eb30-4db4-aa0a-89e5816aa04f" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2" gracePeriod=30
pod="openstack/nova-cell1-novncproxy-0" podUID="34e172e1-eb30-4db4-aa0a-89e5816aa04f" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2" gracePeriod=30 Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.284004 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deddf829-5b45-4f9e-ad77-943c90e4ae60","Type":"ContainerStarted","Data":"f596fbb51f2a179a88451c082490d0887b740e07d523a0bb028ccd00d47857cc"} Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.284047 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deddf829-5b45-4f9e-ad77-943c90e4ae60","Type":"ContainerStarted","Data":"bb4e61b3dba556ade94d67f70b1bc59d8944b8adc8dd7a0ce42a3f4da1402620"} Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.284146 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerName="nova-metadata-log" containerID="cri-o://bb4e61b3dba556ade94d67f70b1bc59d8944b8adc8dd7a0ce42a3f4da1402620" gracePeriod=30 Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.284308 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerName="nova-metadata-metadata" containerID="cri-o://f596fbb51f2a179a88451c082490d0887b740e07d523a0bb028ccd00d47857cc" gracePeriod=30 Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.292694 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c","Type":"ContainerStarted","Data":"284ae53b1e774bb72009fdd09a4234705f200a798d5570a7b662a12f3122d8d6"} Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.292741 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c","Type":"ContainerStarted","Data":"0f7ea90e4bf87199a4e0ae5a6f13f1527bc1946c014bb239d8698cce3066ed8c"} Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.309631 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.5070034960000003 podStartE2EDuration="6.309612415s" podCreationTimestamp="2025-12-10 15:40:46 +0000 UTC" firstStartedPulling="2025-12-10 15:40:47.572661112 +0000 UTC m=+1221.489607739" lastFinishedPulling="2025-12-10 15:40:51.375270031 +0000 UTC m=+1225.292216658" observedRunningTime="2025-12-10 15:40:52.297298026 +0000 UTC m=+1226.214244653" watchObservedRunningTime="2025-12-10 15:40:52.309612415 +0000 UTC m=+1226.226559042" Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.329645 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.272910869 podStartE2EDuration="6.329624104s" podCreationTimestamp="2025-12-10 15:40:46 +0000 UTC" firstStartedPulling="2025-12-10 15:40:47.272615966 +0000 UTC m=+1221.189562583" lastFinishedPulling="2025-12-10 15:40:51.329329191 +0000 UTC m=+1225.246275818" observedRunningTime="2025-12-10 15:40:52.327496042 +0000 UTC m=+1226.244442679" watchObservedRunningTime="2025-12-10 15:40:52.329624104 +0000 UTC m=+1226.246570731" Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.343428 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" 
Dec 10 15:40:52 crc kubenswrapper[4669]: I1210 15:40:52.366316 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.476236797 podStartE2EDuration="6.366295298s" podCreationTimestamp="2025-12-10 15:40:46 +0000 UTC" firstStartedPulling="2025-12-10 15:40:47.440202773 +0000 UTC m=+1221.357149400" lastFinishedPulling="2025-12-10 15:40:51.330261274 +0000 UTC m=+1225.247207901" observedRunningTime="2025-12-10 15:40:52.359904812 +0000 UTC m=+1226.276851439" watchObservedRunningTime="2025-12-10 15:40:52.366295298 +0000 UTC m=+1226.283241925"
Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.318331 4669 generic.go:334] "Generic (PLEG): container finished" podID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerID="f596fbb51f2a179a88451c082490d0887b740e07d523a0bb028ccd00d47857cc" exitCode=0
Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.318570 4669 generic.go:334] "Generic (PLEG): container finished" podID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerID="bb4e61b3dba556ade94d67f70b1bc59d8944b8adc8dd7a0ce42a3f4da1402620" exitCode=143
Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.318470 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deddf829-5b45-4f9e-ad77-943c90e4ae60","Type":"ContainerDied","Data":"f596fbb51f2a179a88451c082490d0887b740e07d523a0bb028ccd00d47857cc"}
Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.318812 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deddf829-5b45-4f9e-ad77-943c90e4ae60","Type":"ContainerDied","Data":"bb4e61b3dba556ade94d67f70b1bc59d8944b8adc8dd7a0ce42a3f4da1402620"}
Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.318827 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"deddf829-5b45-4f9e-ad77-943c90e4ae60","Type":"ContainerDied","Data":"e66d112a90ab7e02be0d9694b7076d405d8523ca6369e5221c2535f4873910d0"}
Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.318837 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e66d112a90ab7e02be0d9694b7076d405d8523ca6369e5221c2535f4873910d0"
Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.327163 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.465670 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45rtw\" (UniqueName: \"kubernetes.io/projected/deddf829-5b45-4f9e-ad77-943c90e4ae60-kube-api-access-45rtw\") pod \"deddf829-5b45-4f9e-ad77-943c90e4ae60\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.466350 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-combined-ca-bundle\") pod \"deddf829-5b45-4f9e-ad77-943c90e4ae60\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.466641 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deddf829-5b45-4f9e-ad77-943c90e4ae60-logs\") pod \"deddf829-5b45-4f9e-ad77-943c90e4ae60\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.466692 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-config-data\") pod \"deddf829-5b45-4f9e-ad77-943c90e4ae60\" (UID: \"deddf829-5b45-4f9e-ad77-943c90e4ae60\") " Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.469557 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/deddf829-5b45-4f9e-ad77-943c90e4ae60-logs" (OuterVolumeSpecName: "logs") pod "deddf829-5b45-4f9e-ad77-943c90e4ae60" (UID: "deddf829-5b45-4f9e-ad77-943c90e4ae60"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.514197 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/deddf829-5b45-4f9e-ad77-943c90e4ae60-kube-api-access-45rtw" (OuterVolumeSpecName: "kube-api-access-45rtw") pod "deddf829-5b45-4f9e-ad77-943c90e4ae60" (UID: "deddf829-5b45-4f9e-ad77-943c90e4ae60"). InnerVolumeSpecName "kube-api-access-45rtw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.527440 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "deddf829-5b45-4f9e-ad77-943c90e4ae60" (UID: "deddf829-5b45-4f9e-ad77-943c90e4ae60"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.538337 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-config-data" (OuterVolumeSpecName: "config-data") pod "deddf829-5b45-4f9e-ad77-943c90e4ae60" (UID: "deddf829-5b45-4f9e-ad77-943c90e4ae60"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.568510 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.568549 4669 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/deddf829-5b45-4f9e-ad77-943c90e4ae60-logs\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.568559 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/deddf829-5b45-4f9e-ad77-943c90e4ae60-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:53 crc kubenswrapper[4669]: I1210 15:40:53.568569 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45rtw\" (UniqueName: \"kubernetes.io/projected/deddf829-5b45-4f9e-ad77-943c90e4ae60-kube-api-access-45rtw\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.329113 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.381390 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.388585 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.412809 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="deddf829-5b45-4f9e-ad77-943c90e4ae60" path="/var/lib/kubelet/pods/deddf829-5b45-4f9e-ad77-943c90e4ae60/volumes" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.422545 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:54 crc kubenswrapper[4669]: E1210 15:40:54.429773 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerName="nova-metadata-log" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.429987 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerName="nova-metadata-log" Dec 10 15:40:54 crc kubenswrapper[4669]: E1210 15:40:54.430106 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerName="nova-metadata-metadata" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.430186 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerName="nova-metadata-metadata" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.441297 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerName="nova-metadata-metadata" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.441863 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="deddf829-5b45-4f9e-ad77-943c90e4ae60" containerName="nova-metadata-log" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.442859 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.442972 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.449925 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.461747 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.585475 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gppp9\" (UniqueName: \"kubernetes.io/projected/053b8e18-25a6-4c25-8e66-91a75246c0e7-kube-api-access-gppp9\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.585515 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.585565 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/053b8e18-25a6-4c25-8e66-91a75246c0e7-logs\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.585638 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.585674 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-config-data\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.687467 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/053b8e18-25a6-4c25-8e66-91a75246c0e7-logs\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.687623 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.687678 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-config-data\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.687730 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-gppp9\" (UniqueName: \"kubernetes.io/projected/053b8e18-25a6-4c25-8e66-91a75246c0e7-kube-api-access-gppp9\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.687760 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.688073 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/053b8e18-25a6-4c25-8e66-91a75246c0e7-logs\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.693689 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.698706 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-config-data\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.703849 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.708839 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gppp9\" (UniqueName: \"kubernetes.io/projected/053b8e18-25a6-4c25-8e66-91a75246c0e7-kube-api-access-gppp9\") pod \"nova-metadata-0\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " pod="openstack/nova-metadata-0" Dec 10 15:40:54 crc kubenswrapper[4669]: I1210 15:40:54.766724 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:40:55 crc kubenswrapper[4669]: I1210 15:40:55.232464 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:55 crc kubenswrapper[4669]: W1210 15:40:55.241528 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod053b8e18_25a6_4c25_8e66_91a75246c0e7.slice/crio-96df0d5627484214008902b24932aca8283c218c63c8d7a3ac6d166645082684 WatchSource:0}: Error finding container 96df0d5627484214008902b24932aca8283c218c63c8d7a3ac6d166645082684: Status 404 returned error can't find the container with id 96df0d5627484214008902b24932aca8283c218c63c8d7a3ac6d166645082684 Dec 10 15:40:55 crc kubenswrapper[4669]: I1210 15:40:55.345121 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"053b8e18-25a6-4c25-8e66-91a75246c0e7","Type":"ContainerStarted","Data":"96df0d5627484214008902b24932aca8283c218c63c8d7a3ac6d166645082684"} Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.358250 4669 generic.go:334] "Generic (PLEG): container finished" podID="d87be1f9-462a-4c6e-b252-11f57a2efe0f" containerID="27b23a0805fdeb0f1bb404825704f44ea46821aa6e595ab90994396db123b2d7" exitCode=0 Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.358342 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-szq9q" event={"ID":"d87be1f9-462a-4c6e-b252-11f57a2efe0f","Type":"ContainerDied","Data":"27b23a0805fdeb0f1bb404825704f44ea46821aa6e595ab90994396db123b2d7"} Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.362080 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"053b8e18-25a6-4c25-8e66-91a75246c0e7","Type":"ContainerStarted","Data":"cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d"} Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.362140 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"053b8e18-25a6-4c25-8e66-91a75246c0e7","Type":"ContainerStarted","Data":"aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5"} Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.441059 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.44103714 podStartE2EDuration="2.44103714s" podCreationTimestamp="2025-12-10 15:40:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:40:56.409560291 +0000 UTC m=+1230.326506918" watchObservedRunningTime="2025-12-10 15:40:56.44103714 +0000 UTC m=+1230.357983767" Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.535281 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.535346 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.567354 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.719517 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.719621 4669 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.794983 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:40:56 crc kubenswrapper[4669]: I1210 15:40:56.943443 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.000928 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-5nkn8"] Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.001231 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" podUID="a7688848-28a2-4b53-bc4e-4867d1ef570e" containerName="dnsmasq-dns" containerID="cri-o://5bd1c9e8f2f95af4e825f3ad173461c8b56a7f7c3daefb3ea81a420c10dff1bd" gracePeriod=10 Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.380459 4669 generic.go:334] "Generic (PLEG): container finished" podID="a2bdf613-d862-4e15-a915-32b1789bc868" containerID="1d8f70c1aba1cfbeb11b1d0af575830e40902737de62f9d8598ad9901254cf47" exitCode=0 Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.380865 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6hngc" event={"ID":"a2bdf613-d862-4e15-a915-32b1789bc868","Type":"ContainerDied","Data":"1d8f70c1aba1cfbeb11b1d0af575830e40902737de62f9d8598ad9901254cf47"} Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.393428 4669 generic.go:334] "Generic (PLEG): container finished" podID="a7688848-28a2-4b53-bc4e-4867d1ef570e" containerID="5bd1c9e8f2f95af4e825f3ad173461c8b56a7f7c3daefb3ea81a420c10dff1bd" exitCode=0 Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.393799 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" event={"ID":"a7688848-28a2-4b53-bc4e-4867d1ef570e","Type":"ContainerDied","Data":"5bd1c9e8f2f95af4e825f3ad173461c8b56a7f7c3daefb3ea81a420c10dff1bd"} Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.449789 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.529093 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.656916 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-config\") pod \"a7688848-28a2-4b53-bc4e-4867d1ef570e\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.656978 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-nb\") pod \"a7688848-28a2-4b53-bc4e-4867d1ef570e\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.656999 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-dns-svc\") pod \"a7688848-28a2-4b53-bc4e-4867d1ef570e\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.657087 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-sb\") pod \"a7688848-28a2-4b53-bc4e-4867d1ef570e\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.657256 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vkfc\" (UniqueName: \"kubernetes.io/projected/a7688848-28a2-4b53-bc4e-4867d1ef570e-kube-api-access-5vkfc\") pod \"a7688848-28a2-4b53-bc4e-4867d1ef570e\" (UID: \"a7688848-28a2-4b53-bc4e-4867d1ef570e\") " Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.709433 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7688848-28a2-4b53-bc4e-4867d1ef570e-kube-api-access-5vkfc" (OuterVolumeSpecName: "kube-api-access-5vkfc") pod "a7688848-28a2-4b53-bc4e-4867d1ef570e" (UID: "a7688848-28a2-4b53-bc4e-4867d1ef570e"). InnerVolumeSpecName "kube-api-access-5vkfc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.720304 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.168:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.720516 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.168:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.760698 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vkfc\" (UniqueName: \"kubernetes.io/projected/a7688848-28a2-4b53-bc4e-4867d1ef570e-kube-api-access-5vkfc\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.765138 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-config" (OuterVolumeSpecName: "config") pod "a7688848-28a2-4b53-bc4e-4867d1ef570e" (UID: "a7688848-28a2-4b53-bc4e-4867d1ef570e"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.774803 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a7688848-28a2-4b53-bc4e-4867d1ef570e" (UID: "a7688848-28a2-4b53-bc4e-4867d1ef570e"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.821722 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a7688848-28a2-4b53-bc4e-4867d1ef570e" (UID: "a7688848-28a2-4b53-bc4e-4867d1ef570e"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.833071 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a7688848-28a2-4b53-bc4e-4867d1ef570e" (UID: "a7688848-28a2-4b53-bc4e-4867d1ef570e"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.862484 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.862510 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.862523 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.862531 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a7688848-28a2-4b53-bc4e-4867d1ef570e-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:57 crc kubenswrapper[4669]: I1210 15:40:57.868830 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.066118 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-scripts\") pod \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.066513 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-combined-ca-bundle\") pod \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.066545 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhs85\" (UniqueName: \"kubernetes.io/projected/d87be1f9-462a-4c6e-b252-11f57a2efe0f-kube-api-access-lhs85\") pod \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.066906 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-config-data\") pod \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\" (UID: \"d87be1f9-462a-4c6e-b252-11f57a2efe0f\") " Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.068906 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-scripts" (OuterVolumeSpecName: "scripts") pod "d87be1f9-462a-4c6e-b252-11f57a2efe0f" (UID: "d87be1f9-462a-4c6e-b252-11f57a2efe0f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.071403 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d87be1f9-462a-4c6e-b252-11f57a2efe0f-kube-api-access-lhs85" (OuterVolumeSpecName: "kube-api-access-lhs85") pod "d87be1f9-462a-4c6e-b252-11f57a2efe0f" (UID: "d87be1f9-462a-4c6e-b252-11f57a2efe0f"). InnerVolumeSpecName "kube-api-access-lhs85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.098447 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d87be1f9-462a-4c6e-b252-11f57a2efe0f" (UID: "d87be1f9-462a-4c6e-b252-11f57a2efe0f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.101384 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-config-data" (OuterVolumeSpecName: "config-data") pod "d87be1f9-462a-4c6e-b252-11f57a2efe0f" (UID: "d87be1f9-462a-4c6e-b252-11f57a2efe0f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.167674 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.167715 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhs85\" (UniqueName: \"kubernetes.io/projected/d87be1f9-462a-4c6e-b252-11f57a2efe0f-kube-api-access-lhs85\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.167727 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.167737 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d87be1f9-462a-4c6e-b252-11f57a2efe0f-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.403141 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-szq9q" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.406091 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.410554 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-szq9q" event={"ID":"d87be1f9-462a-4c6e-b252-11f57a2efe0f","Type":"ContainerDied","Data":"208dd5f5f4dcab5d9ac05a24e50c4223340d4d3b8c74eaea91023c40f49c3800"} Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.410600 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="208dd5f5f4dcab5d9ac05a24e50c4223340d4d3b8c74eaea91023c40f49c3800" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.410615 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58db5546cc-5nkn8" event={"ID":"a7688848-28a2-4b53-bc4e-4867d1ef570e","Type":"ContainerDied","Data":"9a2a74f34ccfe1ff7442ffba39a48da7897a3264fed1301791fc3b90d4ff0736"} Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.410639 4669 scope.go:117] "RemoveContainer" containerID="5bd1c9e8f2f95af4e825f3ad173461c8b56a7f7c3daefb3ea81a420c10dff1bd" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.455040 4669 scope.go:117] "RemoveContainer" containerID="c52155ee6a60d474f77068e3a9c4bb754e1376e10fb4ce2e64c8a548a5a5eddb" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.525953 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-5nkn8"] Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.551251 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58db5546cc-5nkn8"] Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.583012 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.583237 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-log" containerID="cri-o://0f7ea90e4bf87199a4e0ae5a6f13f1527bc1946c014bb239d8698cce3066ed8c" gracePeriod=30 Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.583660 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-api" containerID="cri-o://284ae53b1e774bb72009fdd09a4234705f200a798d5570a7b662a12f3122d8d6" gracePeriod=30 Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.598293 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.621709 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.621921 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerName="nova-metadata-log" containerID="cri-o://aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5" gracePeriod=30 Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.622302 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerName="nova-metadata-metadata" containerID="cri-o://cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d" gracePeriod=30 Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.749141 4669 patch_prober.go:28] interesting 
pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.749195 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.749258 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.749848 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"706b5365ceb6404033f138d584d9ab3c0d60c4c6dec40f2cfffaa838889f4944"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.749897 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://706b5365ceb6404033f138d584d9ab3c0d60c4c6dec40f2cfffaa838889f4944" gracePeriod=600 Dec 10 15:40:58 crc kubenswrapper[4669]: I1210 15:40:58.924498 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.090827 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-combined-ca-bundle\") pod \"a2bdf613-d862-4e15-a915-32b1789bc868\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.091197 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-config-data\") pod \"a2bdf613-d862-4e15-a915-32b1789bc868\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.091350 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-scripts\") pod \"a2bdf613-d862-4e15-a915-32b1789bc868\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.091463 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9tcb\" (UniqueName: \"kubernetes.io/projected/a2bdf613-d862-4e15-a915-32b1789bc868-kube-api-access-z9tcb\") pod \"a2bdf613-d862-4e15-a915-32b1789bc868\" (UID: \"a2bdf613-d862-4e15-a915-32b1789bc868\") " Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.108046 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-scripts" (OuterVolumeSpecName: "scripts") pod "a2bdf613-d862-4e15-a915-32b1789bc868" (UID: "a2bdf613-d862-4e15-a915-32b1789bc868"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.121426 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2bdf613-d862-4e15-a915-32b1789bc868-kube-api-access-z9tcb" (OuterVolumeSpecName: "kube-api-access-z9tcb") pod "a2bdf613-d862-4e15-a915-32b1789bc868" (UID: "a2bdf613-d862-4e15-a915-32b1789bc868"). InnerVolumeSpecName "kube-api-access-z9tcb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.162777 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-config-data" (OuterVolumeSpecName: "config-data") pod "a2bdf613-d862-4e15-a915-32b1789bc868" (UID: "a2bdf613-d862-4e15-a915-32b1789bc868"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.181733 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a2bdf613-d862-4e15-a915-32b1789bc868" (UID: "a2bdf613-d862-4e15-a915-32b1789bc868"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.193429 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.193486 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.193496 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a2bdf613-d862-4e15-a915-32b1789bc868-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.193505 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9tcb\" (UniqueName: \"kubernetes.io/projected/a2bdf613-d862-4e15-a915-32b1789bc868-kube-api-access-z9tcb\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.273109 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.396267 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-config-data\") pod \"053b8e18-25a6-4c25-8e66-91a75246c0e7\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.396420 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-combined-ca-bundle\") pod \"053b8e18-25a6-4c25-8e66-91a75246c0e7\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.396487 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/053b8e18-25a6-4c25-8e66-91a75246c0e7-logs\") pod \"053b8e18-25a6-4c25-8e66-91a75246c0e7\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.397126 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/053b8e18-25a6-4c25-8e66-91a75246c0e7-logs" (OuterVolumeSpecName: "logs") pod "053b8e18-25a6-4c25-8e66-91a75246c0e7" (UID: "053b8e18-25a6-4c25-8e66-91a75246c0e7"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.397147 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gppp9\" (UniqueName: \"kubernetes.io/projected/053b8e18-25a6-4c25-8e66-91a75246c0e7-kube-api-access-gppp9\") pod \"053b8e18-25a6-4c25-8e66-91a75246c0e7\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.397169 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-nova-metadata-tls-certs\") pod \"053b8e18-25a6-4c25-8e66-91a75246c0e7\" (UID: \"053b8e18-25a6-4c25-8e66-91a75246c0e7\") " Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.397556 4669 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/053b8e18-25a6-4c25-8e66-91a75246c0e7-logs\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.405246 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/053b8e18-25a6-4c25-8e66-91a75246c0e7-kube-api-access-gppp9" (OuterVolumeSpecName: "kube-api-access-gppp9") pod "053b8e18-25a6-4c25-8e66-91a75246c0e7" (UID: "053b8e18-25a6-4c25-8e66-91a75246c0e7"). InnerVolumeSpecName "kube-api-access-gppp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.462548 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "053b8e18-25a6-4c25-8e66-91a75246c0e7" (UID: "053b8e18-25a6-4c25-8e66-91a75246c0e7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.475153 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-6hngc" event={"ID":"a2bdf613-d862-4e15-a915-32b1789bc868","Type":"ContainerDied","Data":"6b4a1f525f2a18e772d52a8dae7bc57856cf8c41e70a97b8664336d454a626e7"} Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.475194 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6b4a1f525f2a18e772d52a8dae7bc57856cf8c41e70a97b8664336d454a626e7" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.475234 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-6hngc" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.484714 4669 generic.go:334] "Generic (PLEG): container finished" podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="706b5365ceb6404033f138d584d9ab3c0d60c4c6dec40f2cfffaa838889f4944" exitCode=0 Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.484790 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"706b5365ceb6404033f138d584d9ab3c0d60c4c6dec40f2cfffaa838889f4944"} Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.484816 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"7c083e375e78bcc55b89081b1b91303b8145d8a9d38c789b9d9b1d750a62bcfb"} Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.484833 4669 scope.go:117] "RemoveContainer" containerID="482e52d3d8c64b3e83e8a6d04d8d5d20434b81c087e0b47c0a8e6b34cdbf278e" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.497389 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "053b8e18-25a6-4c25-8e66-91a75246c0e7" (UID: "053b8e18-25a6-4c25-8e66-91a75246c0e7"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.498879 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.498906 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gppp9\" (UniqueName: \"kubernetes.io/projected/053b8e18-25a6-4c25-8e66-91a75246c0e7-kube-api-access-gppp9\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.498917 4669 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.499534 4669 generic.go:334] "Generic (PLEG): container finished" podID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerID="0f7ea90e4bf87199a4e0ae5a6f13f1527bc1946c014bb239d8698cce3066ed8c" exitCode=143 Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.499630 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c","Type":"ContainerDied","Data":"0f7ea90e4bf87199a4e0ae5a6f13f1527bc1946c014bb239d8698cce3066ed8c"} Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.513929 4669 generic.go:334] "Generic (PLEG): container finished" podID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerID="cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d" exitCode=0 Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.513960 4669 generic.go:334] "Generic (PLEG): container finished" podID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerID="aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5" exitCode=143 Dec 
10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.514117 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8a20aa0e-8001-4663-9bb0-a1c65b2450a5" containerName="nova-scheduler-scheduler" containerID="cri-o://75e0bf266f468918f4c7265326b9f377b24b3e615bab4788dcd15991afdad1f6" gracePeriod=30 Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.514154 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"053b8e18-25a6-4c25-8e66-91a75246c0e7","Type":"ContainerDied","Data":"cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d"} Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.514193 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"053b8e18-25a6-4c25-8e66-91a75246c0e7","Type":"ContainerDied","Data":"aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5"} Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.514205 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"053b8e18-25a6-4c25-8e66-91a75246c0e7","Type":"ContainerDied","Data":"96df0d5627484214008902b24932aca8283c218c63c8d7a3ac6d166645082684"} Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.514301 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.522279 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-config-data" (OuterVolumeSpecName: "config-data") pod "053b8e18-25a6-4c25-8e66-91a75246c0e7" (UID: "053b8e18-25a6-4c25-8e66-91a75246c0e7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.573303 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 15:40:59 crc kubenswrapper[4669]: E1210 15:40:59.573754 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerName="nova-metadata-metadata" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.573772 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerName="nova-metadata-metadata" Dec 10 15:40:59 crc kubenswrapper[4669]: E1210 15:40:59.573785 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2bdf613-d862-4e15-a915-32b1789bc868" containerName="nova-cell1-conductor-db-sync" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.573791 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2bdf613-d862-4e15-a915-32b1789bc868" containerName="nova-cell1-conductor-db-sync" Dec 10 15:40:59 crc kubenswrapper[4669]: E1210 15:40:59.573804 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7688848-28a2-4b53-bc4e-4867d1ef570e" containerName="dnsmasq-dns" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.573811 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7688848-28a2-4b53-bc4e-4867d1ef570e" containerName="dnsmasq-dns" Dec 10 15:40:59 crc kubenswrapper[4669]: E1210 15:40:59.573818 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d87be1f9-462a-4c6e-b252-11f57a2efe0f" containerName="nova-manage" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.573824 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="d87be1f9-462a-4c6e-b252-11f57a2efe0f" containerName="nova-manage" Dec 10 15:40:59 crc kubenswrapper[4669]: E1210 15:40:59.573833 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerName="nova-metadata-log" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.573839 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerName="nova-metadata-log" Dec 10 15:40:59 crc kubenswrapper[4669]: E1210 15:40:59.573852 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7688848-28a2-4b53-bc4e-4867d1ef570e" containerName="init" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.573858 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7688848-28a2-4b53-bc4e-4867d1ef570e" containerName="init" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.574033 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2bdf613-d862-4e15-a915-32b1789bc868" containerName="nova-cell1-conductor-db-sync" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.574046 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="d87be1f9-462a-4c6e-b252-11f57a2efe0f" containerName="nova-manage" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.574058 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7688848-28a2-4b53-bc4e-4867d1ef570e" containerName="dnsmasq-dns" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.574071 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerName="nova-metadata-metadata" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.574080 4669 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="053b8e18-25a6-4c25-8e66-91a75246c0e7" containerName="nova-metadata-log" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.574711 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.575382 4669 scope.go:117] "RemoveContainer" containerID="cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.580123 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.584145 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.600864 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/053b8e18-25a6-4c25-8e66-91a75246c0e7-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.619383 4669 scope.go:117] "RemoveContainer" containerID="aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.655261 4669 scope.go:117] "RemoveContainer" containerID="cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d" Dec 10 15:40:59 crc kubenswrapper[4669]: E1210 15:40:59.657298 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d\": container with ID starting with cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d not found: ID does not exist" containerID="cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.657326 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d"} err="failed to get container status \"cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d\": rpc error: code = NotFound desc = could not find container \"cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d\": container with ID starting with cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d not found: ID does not exist" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.657348 4669 scope.go:117] "RemoveContainer" containerID="aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5" Dec 10 15:40:59 crc kubenswrapper[4669]: E1210 15:40:59.660091 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5\": container with ID starting with aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5 not found: ID does not exist" containerID="aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.660160 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5"} err="failed to get container status \"aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5\": rpc error: code = NotFound desc = could not find container 
\"aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5\": container with ID starting with aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5 not found: ID does not exist" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.660245 4669 scope.go:117] "RemoveContainer" containerID="cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.661925 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d"} err="failed to get container status \"cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d\": rpc error: code = NotFound desc = could not find container \"cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d\": container with ID starting with cfe43d412db89ed9289955a08f3f92b508c1eec2571ec190c4464c9931a5265d not found: ID does not exist" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.661975 4669 scope.go:117] "RemoveContainer" containerID="aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.664410 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5"} err="failed to get container status \"aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5\": rpc error: code = NotFound desc = could not find container \"aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5\": container with ID starting with aeb307efad56d25d957d7cd8420f4443a4b0f1853df625e99f9408a3e5befbf5 not found: ID does not exist" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.702056 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c288c8b0-b24e-40e8-8b75-79887e1d9ed4-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c288c8b0-b24e-40e8-8b75-79887e1d9ed4\") " pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.702120 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c288c8b0-b24e-40e8-8b75-79887e1d9ed4-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c288c8b0-b24e-40e8-8b75-79887e1d9ed4\") " pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.702207 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wppsd\" (UniqueName: \"kubernetes.io/projected/c288c8b0-b24e-40e8-8b75-79887e1d9ed4-kube-api-access-wppsd\") pod \"nova-cell1-conductor-0\" (UID: \"c288c8b0-b24e-40e8-8b75-79887e1d9ed4\") " pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.803447 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c288c8b0-b24e-40e8-8b75-79887e1d9ed4-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c288c8b0-b24e-40e8-8b75-79887e1d9ed4\") " pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.803959 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c288c8b0-b24e-40e8-8b75-79887e1d9ed4-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c288c8b0-b24e-40e8-8b75-79887e1d9ed4\") " pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.804122 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wppsd\" (UniqueName: \"kubernetes.io/projected/c288c8b0-b24e-40e8-8b75-79887e1d9ed4-kube-api-access-wppsd\") pod \"nova-cell1-conductor-0\" (UID: \"c288c8b0-b24e-40e8-8b75-79887e1d9ed4\") " pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.807194 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c288c8b0-b24e-40e8-8b75-79887e1d9ed4-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"c288c8b0-b24e-40e8-8b75-79887e1d9ed4\") " pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.808408 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c288c8b0-b24e-40e8-8b75-79887e1d9ed4-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"c288c8b0-b24e-40e8-8b75-79887e1d9ed4\") " pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.831789 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wppsd\" (UniqueName: \"kubernetes.io/projected/c288c8b0-b24e-40e8-8b75-79887e1d9ed4-kube-api-access-wppsd\") pod \"nova-cell1-conductor-0\" (UID: \"c288c8b0-b24e-40e8-8b75-79887e1d9ed4\") " pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.851489 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.861783 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.884971 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.886384 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.891395 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.892561 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.899460 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 10 15:40:59 crc kubenswrapper[4669]: I1210 15:40:59.911477 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.008125 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6pt87\" (UniqueName: \"kubernetes.io/projected/497fab74-8305-45ea-9de7-45b02f3efafb-kube-api-access-6pt87\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.008222 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-config-data\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.008253 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/497fab74-8305-45ea-9de7-45b02f3efafb-logs\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.008285 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.008329 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.110131 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.110229 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6pt87\" (UniqueName: \"kubernetes.io/projected/497fab74-8305-45ea-9de7-45b02f3efafb-kube-api-access-6pt87\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.110277 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-config-data\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.110307 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"logs\" (UniqueName: \"kubernetes.io/empty-dir/497fab74-8305-45ea-9de7-45b02f3efafb-logs\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.110335 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.113973 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/497fab74-8305-45ea-9de7-45b02f3efafb-logs\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.115789 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.117491 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-config-data\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.117840 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.138511 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6pt87\" (UniqueName: \"kubernetes.io/projected/497fab74-8305-45ea-9de7-45b02f3efafb-kube-api-access-6pt87\") pod \"nova-metadata-0\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") " pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.204520 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.413193 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="053b8e18-25a6-4c25-8e66-91a75246c0e7" path="/var/lib/kubelet/pods/053b8e18-25a6-4c25-8e66-91a75246c0e7/volumes" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.415196 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7688848-28a2-4b53-bc4e-4867d1ef570e" path="/var/lib/kubelet/pods/a7688848-28a2-4b53-bc4e-4867d1ef570e/volumes" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.671607 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.675975 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 10 15:41:00 crc kubenswrapper[4669]: I1210 15:41:00.786203 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:41:01 crc kubenswrapper[4669]: E1210 15:41:01.539540 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="75e0bf266f468918f4c7265326b9f377b24b3e615bab4788dcd15991afdad1f6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 15:41:01 crc kubenswrapper[4669]: E1210 15:41:01.546402 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="75e0bf266f468918f4c7265326b9f377b24b3e615bab4788dcd15991afdad1f6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 15:41:01 crc kubenswrapper[4669]: E1210 15:41:01.548511 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="75e0bf266f468918f4c7265326b9f377b24b3e615bab4788dcd15991afdad1f6" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 15:41:01 crc kubenswrapper[4669]: E1210 15:41:01.548572 4669 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="8a20aa0e-8001-4663-9bb0-a1c65b2450a5" containerName="nova-scheduler-scheduler" Dec 10 15:41:01 crc kubenswrapper[4669]: I1210 15:41:01.548758 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c288c8b0-b24e-40e8-8b75-79887e1d9ed4","Type":"ContainerStarted","Data":"1b686976b2f2da21ee1687b66160f0506a1ee4271cd3085f427b70e027db9009"} Dec 10 15:41:01 crc kubenswrapper[4669]: I1210 15:41:01.548803 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"c288c8b0-b24e-40e8-8b75-79887e1d9ed4","Type":"ContainerStarted","Data":"1680133e5462b9160327df73bbcf096c1767ae10339e10a0409cf8dd9212b24b"} Dec 10 15:41:01 crc kubenswrapper[4669]: I1210 15:41:01.548845 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 10 15:41:01 crc kubenswrapper[4669]: I1210 15:41:01.552261 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"497fab74-8305-45ea-9de7-45b02f3efafb","Type":"ContainerStarted","Data":"37879abaca4b6ef6ca66e042310f69c40da482caa62ef5927dbeb3e7b4ed2ca2"} Dec 10 15:41:01 crc kubenswrapper[4669]: I1210 15:41:01.552295 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"497fab74-8305-45ea-9de7-45b02f3efafb","Type":"ContainerStarted","Data":"48528e16997e1a73f4287b85dce77d7c03a6c812c4b23dffe16b2b642361b8d7"} Dec 10 15:41:01 crc kubenswrapper[4669]: I1210 15:41:01.552309 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"497fab74-8305-45ea-9de7-45b02f3efafb","Type":"ContainerStarted","Data":"e401a76a88d85c25832e5b44cc19c7bf6e3dc955aca2225ba5146cc84ebb9456"} Dec 10 15:41:01 crc kubenswrapper[4669]: I1210 15:41:01.578423 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.5783955560000003 podStartE2EDuration="2.578395556s" podCreationTimestamp="2025-12-10 15:40:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:01.570317725 +0000 UTC m=+1235.487264352" watchObservedRunningTime="2025-12-10 15:41:01.578395556 +0000 UTC m=+1235.495342183" Dec 10 15:41:01 crc kubenswrapper[4669]: I1210 15:41:01.598899 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.598880544 podStartE2EDuration="2.598880544s" podCreationTimestamp="2025-12-10 15:40:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:01.591509331 +0000 UTC m=+1235.508455958" watchObservedRunningTime="2025-12-10 15:41:01.598880544 +0000 UTC m=+1235.515827191" Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.569900 4669 generic.go:334] "Generic (PLEG): container finished" podID="8a20aa0e-8001-4663-9bb0-a1c65b2450a5" containerID="75e0bf266f468918f4c7265326b9f377b24b3e615bab4788dcd15991afdad1f6" exitCode=0 Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.570074 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8a20aa0e-8001-4663-9bb0-a1c65b2450a5","Type":"ContainerDied","Data":"75e0bf266f468918f4c7265326b9f377b24b3e615bab4788dcd15991afdad1f6"} Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.570558 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8a20aa0e-8001-4663-9bb0-a1c65b2450a5","Type":"ContainerDied","Data":"2365f558b21fff41b2c460ca346234a1933b6c01cbca1ad7375b68abfb9c08a1"} Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.570575 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2365f558b21fff41b2c460ca346234a1933b6c01cbca1ad7375b68abfb9c08a1" Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.639532 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.728509 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.728715 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="c20308b7-707d-45bc-bda1-b33edf9e2d09" containerName="kube-state-metrics" containerID="cri-o://f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f" gracePeriod=30 Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.776293 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-config-data\") pod \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.776642 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-combined-ca-bundle\") pod \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.776723 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qn984\" (UniqueName: \"kubernetes.io/projected/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-kube-api-access-qn984\") pod \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\" (UID: \"8a20aa0e-8001-4663-9bb0-a1c65b2450a5\") " Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.800454 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-kube-api-access-qn984" (OuterVolumeSpecName: "kube-api-access-qn984") pod "8a20aa0e-8001-4663-9bb0-a1c65b2450a5" (UID: "8a20aa0e-8001-4663-9bb0-a1c65b2450a5"). InnerVolumeSpecName "kube-api-access-qn984". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.824415 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8a20aa0e-8001-4663-9bb0-a1c65b2450a5" (UID: "8a20aa0e-8001-4663-9bb0-a1c65b2450a5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.845627 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/kube-state-metrics-0" podUID="c20308b7-707d-45bc-bda1-b33edf9e2d09" containerName="kube-state-metrics" probeResult="failure" output="Get \"http://10.217.0.103:8081/readyz\": dial tcp 10.217.0.103:8081: connect: connection refused" Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.853905 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-config-data" (OuterVolumeSpecName: "config-data") pod "8a20aa0e-8001-4663-9bb0-a1c65b2450a5" (UID: "8a20aa0e-8001-4663-9bb0-a1c65b2450a5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.879636 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.879678 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:03 crc kubenswrapper[4669]: I1210 15:41:03.879689 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qn984\" (UniqueName: \"kubernetes.io/projected/8a20aa0e-8001-4663-9bb0-a1c65b2450a5-kube-api-access-qn984\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.154530 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.290680 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p5d9f\" (UniqueName: \"kubernetes.io/projected/c20308b7-707d-45bc-bda1-b33edf9e2d09-kube-api-access-p5d9f\") pod \"c20308b7-707d-45bc-bda1-b33edf9e2d09\" (UID: \"c20308b7-707d-45bc-bda1-b33edf9e2d09\") " Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.294759 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c20308b7-707d-45bc-bda1-b33edf9e2d09-kube-api-access-p5d9f" (OuterVolumeSpecName: "kube-api-access-p5d9f") pod "c20308b7-707d-45bc-bda1-b33edf9e2d09" (UID: "c20308b7-707d-45bc-bda1-b33edf9e2d09"). InnerVolumeSpecName "kube-api-access-p5d9f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.394500 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p5d9f\" (UniqueName: \"kubernetes.io/projected/c20308b7-707d-45bc-bda1-b33edf9e2d09-kube-api-access-p5d9f\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.593901 4669 generic.go:334] "Generic (PLEG): container finished" podID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerID="284ae53b1e774bb72009fdd09a4234705f200a798d5570a7b662a12f3122d8d6" exitCode=0 Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.593994 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c","Type":"ContainerDied","Data":"284ae53b1e774bb72009fdd09a4234705f200a798d5570a7b662a12f3122d8d6"} Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.594021 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c","Type":"ContainerDied","Data":"0f6e3227926372e8ca9ae486fab2fd86f95b3fefaa3c3456aaad7a071609a597"} Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.594032 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f6e3227926372e8ca9ae486fab2fd86f95b3fefaa3c3456aaad7a071609a597" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.595995 4669 generic.go:334] "Generic (PLEG): container finished" podID="c20308b7-707d-45bc-bda1-b33edf9e2d09" containerID="f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f" exitCode=2 Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.596108 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.596801 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c20308b7-707d-45bc-bda1-b33edf9e2d09","Type":"ContainerDied","Data":"f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f"} Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.596858 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c20308b7-707d-45bc-bda1-b33edf9e2d09","Type":"ContainerDied","Data":"f9bd99141a91dbd1585909133cbe7132afb5869623ef65d51c57eb886a76a0d8"} Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.596892 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.596908 4669 scope.go:117] "RemoveContainer" containerID="f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.625594 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.632470 4669 scope.go:117] "RemoveContainer" containerID="f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f" Dec 10 15:41:04 crc kubenswrapper[4669]: E1210 15:41:04.637620 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f\": container with ID starting with f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f not found: ID does not exist" containerID="f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.637670 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f"} err="failed to get container status \"f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f\": rpc error: code = NotFound desc = could not find container \"f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f\": container with ID starting with f082719408a449a0ec59957f44c948b8de953d4331c76579f18b98f947c0550f not found: ID does not exist" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.642655 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.662066 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.686420 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.706417 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.716066 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:04 crc kubenswrapper[4669]: E1210 15:41:04.716451 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a20aa0e-8001-4663-9bb0-a1c65b2450a5" containerName="nova-scheduler-scheduler" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.716469 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a20aa0e-8001-4663-9bb0-a1c65b2450a5" containerName="nova-scheduler-scheduler" Dec 10 15:41:04 crc kubenswrapper[4669]: E1210 15:41:04.716478 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c20308b7-707d-45bc-bda1-b33edf9e2d09" containerName="kube-state-metrics" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.716484 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="c20308b7-707d-45bc-bda1-b33edf9e2d09" containerName="kube-state-metrics" Dec 10 15:41:04 crc kubenswrapper[4669]: E1210 15:41:04.716496 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-api" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.716504 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-api" Dec 10 15:41:04 crc kubenswrapper[4669]: E1210 15:41:04.716518 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-log" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.716524 4669 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-log" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.716674 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a20aa0e-8001-4663-9bb0-a1c65b2450a5" containerName="nova-scheduler-scheduler" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.716695 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-log" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.716705 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="c20308b7-707d-45bc-bda1-b33edf9e2d09" containerName="kube-state-metrics" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.716713 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" containerName="nova-api-api" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.717180 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.717932 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.718373 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.734739 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.734934 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.735135 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.736367 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-logs\") pod \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.736390 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-combined-ca-bundle\") pod \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.736588 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-config-data\") pod \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.736658 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7qwv\" (UniqueName: \"kubernetes.io/projected/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-kube-api-access-z7qwv\") pod \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\" (UID: \"eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c\") " Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.736923 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-config-data\") pod \"nova-scheduler-0\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.736989 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05a43950-03b0-4075-a8a5-d157dd6367db-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.737024 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/05a43950-03b0-4075-a8a5-d157dd6367db-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.737044 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/05a43950-03b0-4075-a8a5-d157dd6367db-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.737072 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5g4x\" (UniqueName: \"kubernetes.io/projected/05a43950-03b0-4075-a8a5-d157dd6367db-kube-api-access-j5g4x\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.737091 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.737117 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qb95\" (UniqueName: \"kubernetes.io/projected/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-kube-api-access-2qb95\") pod \"nova-scheduler-0\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.738080 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-logs" (OuterVolumeSpecName: "logs") pod "eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" (UID: "eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.752665 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-kube-api-access-z7qwv" (OuterVolumeSpecName: "kube-api-access-z7qwv") pod "eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" (UID: "eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c"). InnerVolumeSpecName "kube-api-access-z7qwv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.774370 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.793355 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-config-data" (OuterVolumeSpecName: "config-data") pod "eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" (UID: "eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.799994 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" (UID: "eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.806672 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.838601 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05a43950-03b0-4075-a8a5-d157dd6367db-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.838886 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/05a43950-03b0-4075-a8a5-d157dd6367db-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.838986 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/05a43950-03b0-4075-a8a5-d157dd6367db-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.839078 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5g4x\" (UniqueName: \"kubernetes.io/projected/05a43950-03b0-4075-a8a5-d157dd6367db-kube-api-access-j5g4x\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.839165 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.839266 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qb95\" (UniqueName: \"kubernetes.io/projected/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-kube-api-access-2qb95\") pod \"nova-scheduler-0\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " pod="openstack/nova-scheduler-0" Dec 
10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.839393 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-config-data\") pod \"nova-scheduler-0\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.839516 4669 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-logs\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.839586 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.839648 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.839704 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7qwv\" (UniqueName: \"kubernetes.io/projected/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c-kube-api-access-z7qwv\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.843566 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/05a43950-03b0-4075-a8a5-d157dd6367db-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.843828 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-config-data\") pod \"nova-scheduler-0\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.845078 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/05a43950-03b0-4075-a8a5-d157dd6367db-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.845443 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.853134 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05a43950-03b0-4075-a8a5-d157dd6367db-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.856815 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5g4x\" (UniqueName: \"kubernetes.io/projected/05a43950-03b0-4075-a8a5-d157dd6367db-kube-api-access-j5g4x\") pod 
\"kube-state-metrics-0\" (UID: \"05a43950-03b0-4075-a8a5-d157dd6367db\") " pod="openstack/kube-state-metrics-0" Dec 10 15:41:04 crc kubenswrapper[4669]: I1210 15:41:04.857696 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qb95\" (UniqueName: \"kubernetes.io/projected/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-kube-api-access-2qb95\") pod \"nova-scheduler-0\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.050631 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.115269 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.148746 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.149059 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="ceilometer-central-agent" containerID="cri-o://3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b" gracePeriod=30 Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.149112 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="sg-core" containerID="cri-o://3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150" gracePeriod=30 Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.149111 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="proxy-httpd" containerID="cri-o://3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45" gracePeriod=30 Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.149190 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="ceilometer-notification-agent" containerID="cri-o://7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3" gracePeriod=30 Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.206737 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.207814 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.582699 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.601510 4669 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.630693 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"05a43950-03b0-4075-a8a5-d157dd6367db","Type":"ContainerStarted","Data":"4dd778898ec32c49ce97a94412751f16379b571be11aed0ce95bcce2dbca3c00"} Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.636701 4669 generic.go:334] "Generic (PLEG): container finished" podID="cfb2ed8e-311c-4857-8065-57d98d5c7031" 
containerID="3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45" exitCode=0 Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.636726 4669 generic.go:334] "Generic (PLEG): container finished" podID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerID="3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150" exitCode=2 Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.637540 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerDied","Data":"3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45"} Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.637566 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerDied","Data":"3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150"} Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.637606 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.678192 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.699773 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.717072 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.718689 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.727558 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.727966 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:05 crc kubenswrapper[4669]: W1210 15:41:05.772449 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c6f4c48_e20a_4cdd_a564_29bfe748af7d.slice/crio-a5969c3c1c86de0176873e1b62eaf3f86fb4e8057b58f3cf65992b6e121eab8f WatchSource:0}: Error finding container a5969c3c1c86de0176873e1b62eaf3f86fb4e8057b58f3cf65992b6e121eab8f: Status 404 returned error can't find the container with id a5969c3c1c86de0176873e1b62eaf3f86fb4e8057b58f3cf65992b6e121eab8f Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.772979 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.871365 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.871751 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kw78\" (UniqueName: \"kubernetes.io/projected/5609c38e-ed5f-46ca-916d-b5a4bad23acf-kube-api-access-5kw78\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.871894 4669 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-config-data\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.871974 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5609c38e-ed5f-46ca-916d-b5a4bad23acf-logs\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.974224 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5609c38e-ed5f-46ca-916d-b5a4bad23acf-logs\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.974316 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.974381 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kw78\" (UniqueName: \"kubernetes.io/projected/5609c38e-ed5f-46ca-916d-b5a4bad23acf-kube-api-access-5kw78\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.974466 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-config-data\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.974957 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5609c38e-ed5f-46ca-916d-b5a4bad23acf-logs\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.978769 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.978892 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-config-data\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:05 crc kubenswrapper[4669]: I1210 15:41:05.992019 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kw78\" (UniqueName: \"kubernetes.io/projected/5609c38e-ed5f-46ca-916d-b5a4bad23acf-kube-api-access-5kw78\") pod \"nova-api-0\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " pod="openstack/nova-api-0" Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.047451 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.420167 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a20aa0e-8001-4663-9bb0-a1c65b2450a5" path="/var/lib/kubelet/pods/8a20aa0e-8001-4663-9bb0-a1c65b2450a5/volumes" Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.422204 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c20308b7-707d-45bc-bda1-b33edf9e2d09" path="/var/lib/kubelet/pods/c20308b7-707d-45bc-bda1-b33edf9e2d09/volumes" Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.423111 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c" path="/var/lib/kubelet/pods/eb5eafc3-8f37-46c1-bd60-77cefa7bcc8c/volumes" Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.602976 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:06 crc kubenswrapper[4669]: W1210 15:41:06.604890 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5609c38e_ed5f_46ca_916d_b5a4bad23acf.slice/crio-a9e2eb36a66cecb63ee0db2d63e869a00c54dd74f059efb2d07a055cd87e7195 WatchSource:0}: Error finding container a9e2eb36a66cecb63ee0db2d63e869a00c54dd74f059efb2d07a055cd87e7195: Status 404 returned error can't find the container with id a9e2eb36a66cecb63ee0db2d63e869a00c54dd74f059efb2d07a055cd87e7195 Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.658400 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"05a43950-03b0-4075-a8a5-d157dd6367db","Type":"ContainerStarted","Data":"0e9eff9d41841de15b1a1e4ec0fb588d30006e4cbc447be4c2cdcee02c2bdba2"} Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.658829 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.666887 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5609c38e-ed5f-46ca-916d-b5a4bad23acf","Type":"ContainerStarted","Data":"a9e2eb36a66cecb63ee0db2d63e869a00c54dd74f059efb2d07a055cd87e7195"} Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.668404 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c6f4c48-e20a-4cdd-a564-29bfe748af7d","Type":"ContainerStarted","Data":"dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03"} Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.668428 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c6f4c48-e20a-4cdd-a564-29bfe748af7d","Type":"ContainerStarted","Data":"a5969c3c1c86de0176873e1b62eaf3f86fb4e8057b58f3cf65992b6e121eab8f"} Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.679824 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.215977609 podStartE2EDuration="2.679804511s" podCreationTimestamp="2025-12-10 15:41:04 +0000 UTC" firstStartedPulling="2025-12-10 15:41:05.60133123 +0000 UTC m=+1239.518277857" lastFinishedPulling="2025-12-10 15:41:06.065158132 +0000 UTC m=+1239.982104759" observedRunningTime="2025-12-10 15:41:06.67452531 +0000 UTC m=+1240.591471937" watchObservedRunningTime="2025-12-10 15:41:06.679804511 +0000 UTC m=+1240.596751138" Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.685519 4669 
generic.go:334] "Generic (PLEG): container finished" podID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerID="3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b" exitCode=0 Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.685577 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerDied","Data":"3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b"} Dec 10 15:41:06 crc kubenswrapper[4669]: I1210 15:41:06.707422 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.707402944 podStartE2EDuration="2.707402944s" podCreationTimestamp="2025-12-10 15:41:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:06.697875899 +0000 UTC m=+1240.614822526" watchObservedRunningTime="2025-12-10 15:41:06.707402944 +0000 UTC m=+1240.624349571" Dec 10 15:41:07 crc kubenswrapper[4669]: I1210 15:41:07.702293 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5609c38e-ed5f-46ca-916d-b5a4bad23acf","Type":"ContainerStarted","Data":"7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862"} Dec 10 15:41:07 crc kubenswrapper[4669]: I1210 15:41:07.702683 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5609c38e-ed5f-46ca-916d-b5a4bad23acf","Type":"ContainerStarted","Data":"a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2"} Dec 10 15:41:07 crc kubenswrapper[4669]: I1210 15:41:07.726914 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.726890624 podStartE2EDuration="2.726890624s" podCreationTimestamp="2025-12-10 15:41:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:07.726243678 +0000 UTC m=+1241.643190315" watchObservedRunningTime="2025-12-10 15:41:07.726890624 +0000 UTC m=+1241.643837261" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.711995 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.738329 4669 generic.go:334] "Generic (PLEG): container finished" podID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerID="7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3" exitCode=0 Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.738377 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerDied","Data":"7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3"} Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.738409 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"cfb2ed8e-311c-4857-8065-57d98d5c7031","Type":"ContainerDied","Data":"db2987efbb73ef3ab9353f33fa1ac70c6468ed72860a74906df9b9165653e253"} Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.738427 4669 scope.go:117] "RemoveContainer" containerID="3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.738638 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.762444 4669 scope.go:117] "RemoveContainer" containerID="3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.782027 4669 scope.go:117] "RemoveContainer" containerID="7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.805073 4669 scope.go:117] "RemoveContainer" containerID="3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.830432 4669 scope.go:117] "RemoveContainer" containerID="3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45" Dec 10 15:41:09 crc kubenswrapper[4669]: E1210 15:41:09.830853 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45\": container with ID starting with 3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45 not found: ID does not exist" containerID="3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.830885 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45"} err="failed to get container status \"3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45\": rpc error: code = NotFound desc = could not find container \"3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45\": container with ID starting with 3ca1cd69e7525f678c1222e01107e019dc9e25cf1cf1bfba851c62fc10ea4f45 not found: ID does not exist" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.830912 4669 scope.go:117] "RemoveContainer" containerID="3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150" Dec 10 15:41:09 crc kubenswrapper[4669]: E1210 15:41:09.831360 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150\": container with ID starting with 3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150 not found: ID does not exist" containerID="3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.831392 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150"} err="failed to get container status \"3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150\": rpc error: code = NotFound desc = could not find container \"3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150\": container with ID starting with 3d070ef66ff4bff462d124e9072a0ffde3feec7f3c73093e8661c3cff0ef9150 not found: ID does not exist" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.831428 4669 scope.go:117] "RemoveContainer" containerID="7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3" Dec 10 15:41:09 crc kubenswrapper[4669]: E1210 15:41:09.831813 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3\": container with ID starting with 
7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3 not found: ID does not exist" containerID="7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.831833 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3"} err="failed to get container status \"7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3\": rpc error: code = NotFound desc = could not find container \"7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3\": container with ID starting with 7d2c4ac005818cc866396d67a5f829eac49df401120820edfee47482e98e4cd3 not found: ID does not exist" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.831845 4669 scope.go:117] "RemoveContainer" containerID="3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b" Dec 10 15:41:09 crc kubenswrapper[4669]: E1210 15:41:09.832083 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b\": container with ID starting with 3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b not found: ID does not exist" containerID="3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.832101 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b"} err="failed to get container status \"3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b\": rpc error: code = NotFound desc = could not find container \"3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b\": container with ID starting with 3e5087dba89e9e2f77eb6b7a1c51655298a2c5e82f5f05431a784b2d09a1070b not found: ID does not exist" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.859708 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftdmw\" (UniqueName: \"kubernetes.io/projected/cfb2ed8e-311c-4857-8065-57d98d5c7031-kube-api-access-ftdmw\") pod \"cfb2ed8e-311c-4857-8065-57d98d5c7031\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.859839 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-run-httpd\") pod \"cfb2ed8e-311c-4857-8065-57d98d5c7031\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.859870 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-log-httpd\") pod \"cfb2ed8e-311c-4857-8065-57d98d5c7031\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.859904 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-combined-ca-bundle\") pod \"cfb2ed8e-311c-4857-8065-57d98d5c7031\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.859951 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-config-data\") pod \"cfb2ed8e-311c-4857-8065-57d98d5c7031\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.860036 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-scripts\") pod \"cfb2ed8e-311c-4857-8065-57d98d5c7031\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.860051 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-sg-core-conf-yaml\") pod \"cfb2ed8e-311c-4857-8065-57d98d5c7031\" (UID: \"cfb2ed8e-311c-4857-8065-57d98d5c7031\") " Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.861423 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "cfb2ed8e-311c-4857-8065-57d98d5c7031" (UID: "cfb2ed8e-311c-4857-8065-57d98d5c7031"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.861776 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "cfb2ed8e-311c-4857-8065-57d98d5c7031" (UID: "cfb2ed8e-311c-4857-8065-57d98d5c7031"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.870493 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-scripts" (OuterVolumeSpecName: "scripts") pod "cfb2ed8e-311c-4857-8065-57d98d5c7031" (UID: "cfb2ed8e-311c-4857-8065-57d98d5c7031"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.871965 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfb2ed8e-311c-4857-8065-57d98d5c7031-kube-api-access-ftdmw" (OuterVolumeSpecName: "kube-api-access-ftdmw") pod "cfb2ed8e-311c-4857-8065-57d98d5c7031" (UID: "cfb2ed8e-311c-4857-8065-57d98d5c7031"). InnerVolumeSpecName "kube-api-access-ftdmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.888888 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "cfb2ed8e-311c-4857-8065-57d98d5c7031" (UID: "cfb2ed8e-311c-4857-8065-57d98d5c7031"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.922551 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.935916 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cfb2ed8e-311c-4857-8065-57d98d5c7031" (UID: "cfb2ed8e-311c-4857-8065-57d98d5c7031"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.953904 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-config-data" (OuterVolumeSpecName: "config-data") pod "cfb2ed8e-311c-4857-8065-57d98d5c7031" (UID: "cfb2ed8e-311c-4857-8065-57d98d5c7031"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.962091 4669 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.962161 4669 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/cfb2ed8e-311c-4857-8065-57d98d5c7031-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.962179 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.962193 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.962203 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.962485 4669 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/cfb2ed8e-311c-4857-8065-57d98d5c7031-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:09 crc kubenswrapper[4669]: I1210 15:41:09.962499 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftdmw\" (UniqueName: \"kubernetes.io/projected/cfb2ed8e-311c-4857-8065-57d98d5c7031-kube-api-access-ftdmw\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.074996 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.085307 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.094085 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:10 crc kubenswrapper[4669]: E1210 15:41:10.094830 4669 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="ceilometer-notification-agent" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.094851 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="ceilometer-notification-agent" Dec 10 15:41:10 crc kubenswrapper[4669]: E1210 15:41:10.094872 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="ceilometer-central-agent" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.094880 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="ceilometer-central-agent" Dec 10 15:41:10 crc kubenswrapper[4669]: E1210 15:41:10.094891 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="sg-core" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.094896 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="sg-core" Dec 10 15:41:10 crc kubenswrapper[4669]: E1210 15:41:10.094909 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="proxy-httpd" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.094914 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="proxy-httpd" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.095080 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="ceilometer-central-agent" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.095095 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="proxy-httpd" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.095107 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="ceilometer-notification-agent" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.095123 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" containerName="sg-core" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.098527 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.101312 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.101498 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.101727 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.106513 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.127256 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.205710 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.206040 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.273430 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-run-httpd\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.273497 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-scripts\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.273526 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6cvkz\" (UniqueName: \"kubernetes.io/projected/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-kube-api-access-6cvkz\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.273601 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.273684 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-config-data\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.273737 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.273763 4669 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.273805 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-log-httpd\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.378963 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.379012 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.379066 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-log-httpd\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.379109 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-run-httpd\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.379145 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-scripts\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.379165 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6cvkz\" (UniqueName: \"kubernetes.io/projected/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-kube-api-access-6cvkz\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.379277 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.379358 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-config-data\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 
15:41:10.380687 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-log-httpd\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.380966 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-run-httpd\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.385178 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-config-data\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.385509 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-scripts\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.387103 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.387737 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.388337 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.403893 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6cvkz\" (UniqueName: \"kubernetes.io/projected/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-kube-api-access-6cvkz\") pod \"ceilometer-0\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " pod="openstack/ceilometer-0" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.423856 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfb2ed8e-311c-4857-8065-57d98d5c7031" path="/var/lib/kubelet/pods/cfb2ed8e-311c-4857-8065-57d98d5c7031/volumes" Dec 10 15:41:10 crc kubenswrapper[4669]: I1210 15:41:10.428441 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:11 crc kubenswrapper[4669]: I1210 15:41:11.101764 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:11 crc kubenswrapper[4669]: I1210 15:41:11.224426 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.175:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 15:41:11 crc kubenswrapper[4669]: I1210 15:41:11.224434 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.175:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 15:41:11 crc kubenswrapper[4669]: I1210 15:41:11.789660 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerStarted","Data":"2a4abe318a31296ffae281264570a9aec149cb34b3030cc804db280061accf06"} Dec 10 15:41:12 crc kubenswrapper[4669]: I1210 15:41:12.804281 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerStarted","Data":"6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c"} Dec 10 15:41:14 crc kubenswrapper[4669]: I1210 15:41:14.848826 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerStarted","Data":"f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971"} Dec 10 15:41:15 crc kubenswrapper[4669]: I1210 15:41:15.060937 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 10 15:41:15 crc kubenswrapper[4669]: I1210 15:41:15.117076 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 10 15:41:15 crc kubenswrapper[4669]: I1210 15:41:15.152492 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 10 15:41:15 crc kubenswrapper[4669]: I1210 15:41:15.864523 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerStarted","Data":"059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef"} Dec 10 15:41:15 crc kubenswrapper[4669]: I1210 15:41:15.894821 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 10 15:41:16 crc kubenswrapper[4669]: I1210 15:41:16.056590 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 15:41:16 crc kubenswrapper[4669]: I1210 15:41:16.056642 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 15:41:17 crc kubenswrapper[4669]: I1210 15:41:17.138451 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.178:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 15:41:17 crc 
kubenswrapper[4669]: I1210 15:41:17.138443 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.178:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 15:41:17 crc kubenswrapper[4669]: I1210 15:41:17.882444 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerStarted","Data":"db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e"} Dec 10 15:41:17 crc kubenswrapper[4669]: I1210 15:41:17.882951 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 15:41:20 crc kubenswrapper[4669]: I1210 15:41:20.213080 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 15:41:20 crc kubenswrapper[4669]: I1210 15:41:20.213706 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 15:41:20 crc kubenswrapper[4669]: I1210 15:41:20.222394 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 15:41:20 crc kubenswrapper[4669]: I1210 15:41:20.222901 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 15:41:20 crc kubenswrapper[4669]: I1210 15:41:20.238587 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.610092274 podStartE2EDuration="10.238567038s" podCreationTimestamp="2025-12-10 15:41:10 +0000 UTC" firstStartedPulling="2025-12-10 15:41:11.097117456 +0000 UTC m=+1245.014064083" lastFinishedPulling="2025-12-10 15:41:16.72559222 +0000 UTC m=+1250.642538847" observedRunningTime="2025-12-10 15:41:17.913836709 +0000 UTC m=+1251.830783356" watchObservedRunningTime="2025-12-10 15:41:20.238567038 +0000 UTC m=+1254.155513675" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.699959 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.864171 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sbzst\" (UniqueName: \"kubernetes.io/projected/34e172e1-eb30-4db4-aa0a-89e5816aa04f-kube-api-access-sbzst\") pod \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.864543 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-combined-ca-bundle\") pod \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.864690 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-config-data\") pod \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\" (UID: \"34e172e1-eb30-4db4-aa0a-89e5816aa04f\") " Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.869303 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34e172e1-eb30-4db4-aa0a-89e5816aa04f-kube-api-access-sbzst" (OuterVolumeSpecName: "kube-api-access-sbzst") pod "34e172e1-eb30-4db4-aa0a-89e5816aa04f" (UID: "34e172e1-eb30-4db4-aa0a-89e5816aa04f"). InnerVolumeSpecName "kube-api-access-sbzst". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.889824 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "34e172e1-eb30-4db4-aa0a-89e5816aa04f" (UID: "34e172e1-eb30-4db4-aa0a-89e5816aa04f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.890730 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-config-data" (OuterVolumeSpecName: "config-data") pod "34e172e1-eb30-4db4-aa0a-89e5816aa04f" (UID: "34e172e1-eb30-4db4-aa0a-89e5816aa04f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.932473 4669 generic.go:334] "Generic (PLEG): container finished" podID="34e172e1-eb30-4db4-aa0a-89e5816aa04f" containerID="f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2" exitCode=137 Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.932512 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"34e172e1-eb30-4db4-aa0a-89e5816aa04f","Type":"ContainerDied","Data":"f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2"} Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.932542 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"34e172e1-eb30-4db4-aa0a-89e5816aa04f","Type":"ContainerDied","Data":"d87f401d4311d596c5bbdb3d7c98fc6adc1f906345c97cadfd901cd29a351cf4"} Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.932548 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.932560 4669 scope.go:117] "RemoveContainer" containerID="f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.967863 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.968088 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/34e172e1-eb30-4db4-aa0a-89e5816aa04f-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.968149 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sbzst\" (UniqueName: \"kubernetes.io/projected/34e172e1-eb30-4db4-aa0a-89e5816aa04f-kube-api-access-sbzst\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.997617 4669 scope.go:117] "RemoveContainer" containerID="f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2" Dec 10 15:41:22 crc kubenswrapper[4669]: E1210 15:41:22.998661 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2\": container with ID starting with f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2 not found: ID does not exist" containerID="f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2" Dec 10 15:41:22 crc kubenswrapper[4669]: I1210 15:41:22.998698 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2"} err="failed to get container status \"f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2\": rpc error: code = NotFound desc = could not find container \"f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2\": container with ID starting with f13516c468368b825a464b60043b774852aa294b3301219785c94aa03e6292f2 not found: ID does not exist" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.041137 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.063437 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.070266 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 15:41:23 crc kubenswrapper[4669]: E1210 15:41:23.070747 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34e172e1-eb30-4db4-aa0a-89e5816aa04f" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.070779 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="34e172e1-eb30-4db4-aa0a-89e5816aa04f" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.070932 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="34e172e1-eb30-4db4-aa0a-89e5816aa04f" containerName="nova-cell1-novncproxy-novncproxy" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.071620 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.073632 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.075823 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.077804 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.079297 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.171797 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9jcp\" (UniqueName: \"kubernetes.io/projected/1ea5cb75-2c66-4b3b-b38b-392e22552d43-kube-api-access-k9jcp\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.172366 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.172673 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.172842 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.173009 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.274650 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.274776 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.274808 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.274832 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.274859 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9jcp\" (UniqueName: \"kubernetes.io/projected/1ea5cb75-2c66-4b3b-b38b-392e22552d43-kube-api-access-k9jcp\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.279271 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.280409 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.282583 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.296427 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9jcp\" (UniqueName: \"kubernetes.io/projected/1ea5cb75-2c66-4b3b-b38b-392e22552d43-kube-api-access-k9jcp\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.305476 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1ea5cb75-2c66-4b3b-b38b-392e22552d43-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"1ea5cb75-2c66-4b3b-b38b-392e22552d43\") " pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.400823 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.903367 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 10 15:41:23 crc kubenswrapper[4669]: I1210 15:41:23.947304 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1ea5cb75-2c66-4b3b-b38b-392e22552d43","Type":"ContainerStarted","Data":"e5c45f199f2460351bc3bc4e5b1ed8e3ea5b31ecf4a203d676052763bc3a3d95"} Dec 10 15:41:24 crc kubenswrapper[4669]: I1210 15:41:24.415811 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34e172e1-eb30-4db4-aa0a-89e5816aa04f" path="/var/lib/kubelet/pods/34e172e1-eb30-4db4-aa0a-89e5816aa04f/volumes" Dec 10 15:41:24 crc kubenswrapper[4669]: I1210 15:41:24.958970 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"1ea5cb75-2c66-4b3b-b38b-392e22552d43","Type":"ContainerStarted","Data":"a3e3141fc5e98d399fd988fbfe0e1727a97fea9ab3b9a840eecdacc210bbb043"} Dec 10 15:41:24 crc kubenswrapper[4669]: I1210 15:41:24.985433 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.985412168 podStartE2EDuration="2.985412168s" podCreationTimestamp="2025-12-10 15:41:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:24.977497082 +0000 UTC m=+1258.894443719" watchObservedRunningTime="2025-12-10 15:41:24.985412168 +0000 UTC m=+1258.902358805" Dec 10 15:41:26 crc kubenswrapper[4669]: I1210 15:41:26.052794 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 15:41:26 crc kubenswrapper[4669]: I1210 15:41:26.053538 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 15:41:26 crc kubenswrapper[4669]: I1210 15:41:26.053671 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 15:41:26 crc kubenswrapper[4669]: I1210 15:41:26.057455 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 15:41:26 crc kubenswrapper[4669]: I1210 15:41:26.980326 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 15:41:26 crc kubenswrapper[4669]: I1210 15:41:26.986262 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.233067 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-b5wjd"] Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.237137 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.260005 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-b5wjd"] Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.357491 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.357591 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz57t\" (UniqueName: \"kubernetes.io/projected/bfde7108-9097-4b59-b5a2-24bfa50f9884-kube-api-access-wz57t\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.357650 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.357763 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-config\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.357784 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.459109 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz57t\" (UniqueName: \"kubernetes.io/projected/bfde7108-9097-4b59-b5a2-24bfa50f9884-kube-api-access-wz57t\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.459172 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.459256 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-config\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.459274 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.459341 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.460188 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-sb\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.462021 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-config\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.462106 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-nb\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.462263 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-dns-svc\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.480755 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz57t\" (UniqueName: \"kubernetes.io/projected/bfde7108-9097-4b59-b5a2-24bfa50f9884-kube-api-access-wz57t\") pod \"dnsmasq-dns-68d4b6d797-b5wjd\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:27 crc kubenswrapper[4669]: I1210 15:41:27.578598 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:28 crc kubenswrapper[4669]: I1210 15:41:28.030673 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-b5wjd"] Dec 10 15:41:28 crc kubenswrapper[4669]: I1210 15:41:28.408602 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.006921 4669 generic.go:334] "Generic (PLEG): container finished" podID="bfde7108-9097-4b59-b5a2-24bfa50f9884" containerID="5ff8ab0470627842478306d2b84e3ebc29e317b7a3ada9b0a22c4668746502b5" exitCode=0 Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.007159 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" event={"ID":"bfde7108-9097-4b59-b5a2-24bfa50f9884","Type":"ContainerDied","Data":"5ff8ab0470627842478306d2b84e3ebc29e317b7a3ada9b0a22c4668746502b5"} Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.007297 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" event={"ID":"bfde7108-9097-4b59-b5a2-24bfa50f9884","Type":"ContainerStarted","Data":"cc9aa936844c2e8fbdf0fb03c421d7cb1f44cf85df3cb006a65b729676e8fe32"} Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.602658 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.726414 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.726683 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="ceilometer-central-agent" containerID="cri-o://6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c" gracePeriod=30 Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.726698 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="proxy-httpd" containerID="cri-o://db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e" gracePeriod=30 Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.726754 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="ceilometer-notification-agent" containerID="cri-o://f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971" gracePeriod=30 Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.726745 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="sg-core" containerID="cri-o://059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef" gracePeriod=30 Dec 10 15:41:29 crc kubenswrapper[4669]: I1210 15:41:29.737307 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.179:3000/\": EOF" Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.074099 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" 
event={"ID":"bfde7108-9097-4b59-b5a2-24bfa50f9884","Type":"ContainerStarted","Data":"f502b646e18d906ab39dfad37d1f7a8641a37e67cebd79f00847c45a797d28a0"} Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.074447 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.105012 4669 generic.go:334] "Generic (PLEG): container finished" podID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerID="db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e" exitCode=0 Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.105050 4669 generic.go:334] "Generic (PLEG): container finished" podID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerID="059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef" exitCode=2 Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.105320 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-log" containerID="cri-o://a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2" gracePeriod=30 Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.105420 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerDied","Data":"db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e"} Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.105450 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerDied","Data":"059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef"} Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.105793 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" podStartSLOduration=3.105782023 podStartE2EDuration="3.105782023s" podCreationTimestamp="2025-12-10 15:41:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:30.099893687 +0000 UTC m=+1264.016840314" watchObservedRunningTime="2025-12-10 15:41:30.105782023 +0000 UTC m=+1264.022728650" Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.105821 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-api" containerID="cri-o://7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862" gracePeriod=30 Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.760772 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.925176 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-combined-ca-bundle\") pod \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.925602 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-scripts\") pod \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.925744 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-run-httpd\") pod \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.925829 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-sg-core-conf-yaml\") pod \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.925864 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-ceilometer-tls-certs\") pod \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.925894 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-config-data\") pod \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.925924 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-log-httpd\") pod \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.925958 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6cvkz\" (UniqueName: \"kubernetes.io/projected/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-kube-api-access-6cvkz\") pod \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\" (UID: \"f194a5b1-d81a-40c1-bb6c-597ccb82aeef\") " Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.933017 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f194a5b1-d81a-40c1-bb6c-597ccb82aeef" (UID: "f194a5b1-d81a-40c1-bb6c-597ccb82aeef"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.938397 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f194a5b1-d81a-40c1-bb6c-597ccb82aeef" (UID: "f194a5b1-d81a-40c1-bb6c-597ccb82aeef"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.944824 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-scripts" (OuterVolumeSpecName: "scripts") pod "f194a5b1-d81a-40c1-bb6c-597ccb82aeef" (UID: "f194a5b1-d81a-40c1-bb6c-597ccb82aeef"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.945730 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-kube-api-access-6cvkz" (OuterVolumeSpecName: "kube-api-access-6cvkz") pod "f194a5b1-d81a-40c1-bb6c-597ccb82aeef" (UID: "f194a5b1-d81a-40c1-bb6c-597ccb82aeef"). InnerVolumeSpecName "kube-api-access-6cvkz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.963704 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f194a5b1-d81a-40c1-bb6c-597ccb82aeef" (UID: "f194a5b1-d81a-40c1-bb6c-597ccb82aeef"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:30 crc kubenswrapper[4669]: I1210 15:41:30.976050 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "f194a5b1-d81a-40c1-bb6c-597ccb82aeef" (UID: "f194a5b1-d81a-40c1-bb6c-597ccb82aeef"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.010646 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f194a5b1-d81a-40c1-bb6c-597ccb82aeef" (UID: "f194a5b1-d81a-40c1-bb6c-597ccb82aeef"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.033099 4669 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.033135 4669 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.033151 4669 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.033164 4669 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.033175 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6cvkz\" (UniqueName: \"kubernetes.io/projected/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-kube-api-access-6cvkz\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.033186 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.033196 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.033906 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-config-data" (OuterVolumeSpecName: "config-data") pod "f194a5b1-d81a-40c1-bb6c-597ccb82aeef" (UID: "f194a5b1-d81a-40c1-bb6c-597ccb82aeef"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.116862 4669 generic.go:334] "Generic (PLEG): container finished" podID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerID="f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971" exitCode=0 Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.116902 4669 generic.go:334] "Generic (PLEG): container finished" podID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerID="6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c" exitCode=0 Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.116940 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerDied","Data":"f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971"} Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.116968 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerDied","Data":"6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c"} Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.116978 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f194a5b1-d81a-40c1-bb6c-597ccb82aeef","Type":"ContainerDied","Data":"2a4abe318a31296ffae281264570a9aec149cb34b3030cc804db280061accf06"} Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.116995 4669 scope.go:117] "RemoveContainer" containerID="db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.117113 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.120442 4669 generic.go:334] "Generic (PLEG): container finished" podID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerID="a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2" exitCode=143 Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.120544 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5609c38e-ed5f-46ca-916d-b5a4bad23acf","Type":"ContainerDied","Data":"a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2"} Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.134813 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f194a5b1-d81a-40c1-bb6c-597ccb82aeef-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.140659 4669 scope.go:117] "RemoveContainer" containerID="059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.156335 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.160980 4669 scope.go:117] "RemoveContainer" containerID="f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.168518 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.180940 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.181050 4669 scope.go:117] "RemoveContainer" 
containerID="6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c" Dec 10 15:41:31 crc kubenswrapper[4669]: E1210 15:41:31.181346 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="ceilometer-notification-agent" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.181386 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="ceilometer-notification-agent" Dec 10 15:41:31 crc kubenswrapper[4669]: E1210 15:41:31.181398 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="proxy-httpd" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.181404 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="proxy-httpd" Dec 10 15:41:31 crc kubenswrapper[4669]: E1210 15:41:31.181421 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="sg-core" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.181429 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="sg-core" Dec 10 15:41:31 crc kubenswrapper[4669]: E1210 15:41:31.181549 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="ceilometer-central-agent" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.181556 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="ceilometer-central-agent" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.181819 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="ceilometer-notification-agent" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.181843 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="sg-core" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.181854 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="ceilometer-central-agent" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.181865 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" containerName="proxy-httpd" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.184142 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.193730 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.193945 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.209722 4669 scope.go:117] "RemoveContainer" containerID="db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e" Dec 10 15:41:31 crc kubenswrapper[4669]: E1210 15:41:31.210127 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e\": container with ID starting with db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e not found: ID does not exist" containerID="db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.210151 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e"} err="failed to get container status \"db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e\": rpc error: code = NotFound desc = could not find container \"db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e\": container with ID starting with db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e not found: ID does not exist" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.210172 4669 scope.go:117] "RemoveContainer" containerID="059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef" Dec 10 15:41:31 crc kubenswrapper[4669]: E1210 15:41:31.210459 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef\": container with ID starting with 059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef not found: ID does not exist" containerID="059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.210476 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef"} err="failed to get container status \"059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef\": rpc error: code = NotFound desc = could not find container \"059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef\": container with ID starting with 059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef not found: ID does not exist" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.210489 4669 scope.go:117] "RemoveContainer" containerID="f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971" Dec 10 15:41:31 crc kubenswrapper[4669]: E1210 15:41:31.210657 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971\": container with ID starting with f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971 not found: ID does not exist" containerID="f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 
15:41:31.210674 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971"} err="failed to get container status \"f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971\": rpc error: code = NotFound desc = could not find container \"f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971\": container with ID starting with f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971 not found: ID does not exist" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.210686 4669 scope.go:117] "RemoveContainer" containerID="6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c" Dec 10 15:41:31 crc kubenswrapper[4669]: E1210 15:41:31.210869 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c\": container with ID starting with 6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c not found: ID does not exist" containerID="6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.210884 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c"} err="failed to get container status \"6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c\": rpc error: code = NotFound desc = could not find container \"6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c\": container with ID starting with 6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c not found: ID does not exist" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.210897 4669 scope.go:117] "RemoveContainer" containerID="db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.211058 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e"} err="failed to get container status \"db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e\": rpc error: code = NotFound desc = could not find container \"db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e\": container with ID starting with db9708f1a542caa030413a423a91b3fcaa5ee9c3222c5c39f7403674d7593d4e not found: ID does not exist" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.211071 4669 scope.go:117] "RemoveContainer" containerID="059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.211379 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef"} err="failed to get container status \"059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef\": rpc error: code = NotFound desc = could not find container \"059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef\": container with ID starting with 059686cdae242b7fca644f67622946f5ef45c5aada25c806d1483d7ff55314ef not found: ID does not exist" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.211397 4669 scope.go:117] "RemoveContainer" containerID="f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 
15:41:31.211565 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971"} err="failed to get container status \"f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971\": rpc error: code = NotFound desc = could not find container \"f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971\": container with ID starting with f4d3344c7944add96fad502cebd68f343a6b47a9653907fd9b9c25cc83790971 not found: ID does not exist" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.211579 4669 scope.go:117] "RemoveContainer" containerID="6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.211745 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c"} err="failed to get container status \"6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c\": rpc error: code = NotFound desc = could not find container \"6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c\": container with ID starting with 6e4f0435e3f0d7033db6c3e0b5b7501369df0fbc482655ff9fa3e748c4a0e30c not found: ID does not exist" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.254179 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.257770 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.318851 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:31 crc kubenswrapper[4669]: E1210 15:41:31.319737 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceilometer-tls-certs combined-ca-bundle config-data kube-api-access-qnp2c log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="e6a7966b-d089-4bfb-a276-8bea7cf4abf5" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.355187 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnp2c\" (UniqueName: \"kubernetes.io/projected/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-kube-api-access-qnp2c\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.355534 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-config-data\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.355647 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-log-httpd\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.355731 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.355819 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-run-httpd\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.355882 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.355990 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.356070 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-scripts\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.457816 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-run-httpd\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.457867 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.457965 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.457998 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-scripts\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.458040 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnp2c\" (UniqueName: \"kubernetes.io/projected/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-kube-api-access-qnp2c\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.458399 4669 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-run-httpd\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.458422 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-config-data\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.458454 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-log-httpd\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.458501 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.458862 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-log-httpd\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.462618 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-scripts\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.462905 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.463498 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.465473 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-config-data\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.465833 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:31 crc kubenswrapper[4669]: I1210 15:41:31.476847 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnp2c\" (UniqueName: 
\"kubernetes.io/projected/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-kube-api-access-qnp2c\") pod \"ceilometer-0\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " pod="openstack/ceilometer-0" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.133703 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.146695 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.270901 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-sg-core-conf-yaml\") pod \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.270986 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-scripts\") pod \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.271120 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-log-httpd\") pod \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.271195 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnp2c\" (UniqueName: \"kubernetes.io/projected/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-kube-api-access-qnp2c\") pod \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.271254 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-ceilometer-tls-certs\") pod \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.271276 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-config-data\") pod \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.271303 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-combined-ca-bundle\") pod \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.271337 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-run-httpd\") pod \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\" (UID: \"e6a7966b-d089-4bfb-a276-8bea7cf4abf5\") " Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.271448 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-log-httpd" 
(OuterVolumeSpecName: "log-httpd") pod "e6a7966b-d089-4bfb-a276-8bea7cf4abf5" (UID: "e6a7966b-d089-4bfb-a276-8bea7cf4abf5"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.271697 4669 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.272348 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e6a7966b-d089-4bfb-a276-8bea7cf4abf5" (UID: "e6a7966b-d089-4bfb-a276-8bea7cf4abf5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.276029 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e6a7966b-d089-4bfb-a276-8bea7cf4abf5" (UID: "e6a7966b-d089-4bfb-a276-8bea7cf4abf5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.276061 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "e6a7966b-d089-4bfb-a276-8bea7cf4abf5" (UID: "e6a7966b-d089-4bfb-a276-8bea7cf4abf5"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.276108 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6a7966b-d089-4bfb-a276-8bea7cf4abf5" (UID: "e6a7966b-d089-4bfb-a276-8bea7cf4abf5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.276437 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-kube-api-access-qnp2c" (OuterVolumeSpecName: "kube-api-access-qnp2c") pod "e6a7966b-d089-4bfb-a276-8bea7cf4abf5" (UID: "e6a7966b-d089-4bfb-a276-8bea7cf4abf5"). InnerVolumeSpecName "kube-api-access-qnp2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.277086 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-config-data" (OuterVolumeSpecName: "config-data") pod "e6a7966b-d089-4bfb-a276-8bea7cf4abf5" (UID: "e6a7966b-d089-4bfb-a276-8bea7cf4abf5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.278936 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-scripts" (OuterVolumeSpecName: "scripts") pod "e6a7966b-d089-4bfb-a276-8bea7cf4abf5" (UID: "e6a7966b-d089-4bfb-a276-8bea7cf4abf5"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.372933 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.372973 4669 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.372984 4669 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.372993 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.373002 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnp2c\" (UniqueName: \"kubernetes.io/projected/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-kube-api-access-qnp2c\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.373011 4669 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.373019 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e6a7966b-d089-4bfb-a276-8bea7cf4abf5-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:32 crc kubenswrapper[4669]: I1210 15:41:32.409162 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f194a5b1-d81a-40c1-bb6c-597ccb82aeef" path="/var/lib/kubelet/pods/f194a5b1-d81a-40c1-bb6c-597ccb82aeef/volumes" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.144505 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.188142 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.200266 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.238494 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.275050 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.284019 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.284296 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.284544 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.288812 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.290713 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.290759 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.290777 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wlt2s\" (UniqueName: \"kubernetes.io/projected/38eb24f6-e94c-4469-8284-4e3e79ca1712-kube-api-access-wlt2s\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.290795 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.290869 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38eb24f6-e94c-4469-8284-4e3e79ca1712-run-httpd\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.290885 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38eb24f6-e94c-4469-8284-4e3e79ca1712-log-httpd\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.290900 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-scripts\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.290913 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-config-data\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.392576 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.392835 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.392874 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wlt2s\" (UniqueName: \"kubernetes.io/projected/38eb24f6-e94c-4469-8284-4e3e79ca1712-kube-api-access-wlt2s\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.392913 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.393349 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38eb24f6-e94c-4469-8284-4e3e79ca1712-run-httpd\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.393375 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38eb24f6-e94c-4469-8284-4e3e79ca1712-log-httpd\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.393390 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-scripts\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.393563 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-config-data\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.393889 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38eb24f6-e94c-4469-8284-4e3e79ca1712-log-httpd\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.393978 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/38eb24f6-e94c-4469-8284-4e3e79ca1712-run-httpd\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.402119 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.402857 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.405084 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-scripts\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.413463 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-config-data\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.414025 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.422185 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wlt2s\" (UniqueName: \"kubernetes.io/projected/38eb24f6-e94c-4469-8284-4e3e79ca1712-kube-api-access-wlt2s\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.428564 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.429309 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/38eb24f6-e94c-4469-8284-4e3e79ca1712-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"38eb24f6-e94c-4469-8284-4e3e79ca1712\") " pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.523178 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.632365 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.698916 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kw78\" (UniqueName: \"kubernetes.io/projected/5609c38e-ed5f-46ca-916d-b5a4bad23acf-kube-api-access-5kw78\") pod \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.698958 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-combined-ca-bundle\") pod \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.699048 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5609c38e-ed5f-46ca-916d-b5a4bad23acf-logs\") pod \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.699137 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-config-data\") pod \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\" (UID: \"5609c38e-ed5f-46ca-916d-b5a4bad23acf\") " Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.700746 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5609c38e-ed5f-46ca-916d-b5a4bad23acf-logs" (OuterVolumeSpecName: "logs") pod "5609c38e-ed5f-46ca-916d-b5a4bad23acf" (UID: "5609c38e-ed5f-46ca-916d-b5a4bad23acf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.705477 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5609c38e-ed5f-46ca-916d-b5a4bad23acf-kube-api-access-5kw78" (OuterVolumeSpecName: "kube-api-access-5kw78") pod "5609c38e-ed5f-46ca-916d-b5a4bad23acf" (UID: "5609c38e-ed5f-46ca-916d-b5a4bad23acf"). InnerVolumeSpecName "kube-api-access-5kw78". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.732150 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-config-data" (OuterVolumeSpecName: "config-data") pod "5609c38e-ed5f-46ca-916d-b5a4bad23acf" (UID: "5609c38e-ed5f-46ca-916d-b5a4bad23acf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.745969 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5609c38e-ed5f-46ca-916d-b5a4bad23acf" (UID: "5609c38e-ed5f-46ca-916d-b5a4bad23acf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.804729 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kw78\" (UniqueName: \"kubernetes.io/projected/5609c38e-ed5f-46ca-916d-b5a4bad23acf-kube-api-access-5kw78\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.804763 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.804777 4669 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5609c38e-ed5f-46ca-916d-b5a4bad23acf-logs\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:33 crc kubenswrapper[4669]: I1210 15:41:33.804806 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5609c38e-ed5f-46ca-916d-b5a4bad23acf-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.041332 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.153659 4669 generic.go:334] "Generic (PLEG): container finished" podID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerID="7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862" exitCode=0 Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.153756 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5609c38e-ed5f-46ca-916d-b5a4bad23acf","Type":"ContainerDied","Data":"7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862"} Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.153785 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5609c38e-ed5f-46ca-916d-b5a4bad23acf","Type":"ContainerDied","Data":"a9e2eb36a66cecb63ee0db2d63e869a00c54dd74f059efb2d07a055cd87e7195"} Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.153803 4669 scope.go:117] "RemoveContainer" containerID="7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.153922 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.159209 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38eb24f6-e94c-4469-8284-4e3e79ca1712","Type":"ContainerStarted","Data":"1443693e4dbe1980f559901f699b7c77cfe7fe675dd1c146424cc269a8a2a4d6"} Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.179742 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.188286 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.196343 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.197413 4669 scope.go:117] "RemoveContainer" containerID="a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.220802 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:34 crc kubenswrapper[4669]: E1210 15:41:34.221426 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-log" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.221447 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-log" Dec 10 15:41:34 crc kubenswrapper[4669]: E1210 15:41:34.221481 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-api" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.221490 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-api" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.221777 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-log" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.221808 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" containerName="nova-api-api" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.223032 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.227060 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.227079 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.227384 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.228622 4669 scope.go:117] "RemoveContainer" containerID="7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862" Dec 10 15:41:34 crc kubenswrapper[4669]: E1210 15:41:34.230286 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862\": container with ID starting with 7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862 not found: ID does not exist" containerID="7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.230327 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862"} err="failed to get container status \"7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862\": rpc error: code = NotFound desc = could not find container \"7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862\": container with ID starting with 7edf162591194fd8ce497732891825d2b35f6fbcf3c4dae6443204a671f0d862 not found: ID does not exist" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.230354 4669 scope.go:117] "RemoveContainer" containerID="a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2" Dec 10 15:41:34 crc kubenswrapper[4669]: E1210 15:41:34.231491 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2\": container with ID starting with a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2 not found: ID does not exist" containerID="a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.231541 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2"} err="failed to get container status \"a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2\": rpc error: code = NotFound desc = could not find container \"a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2\": container with ID starting with a5c1d7fec234ae13796453922e556241b875d8c17b06e439c6a72f619fada5b2 not found: ID does not exist" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.259288 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.314194 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597cc780-543b-4a5d-8ac8-f045c697740e-logs\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc 
kubenswrapper[4669]: I1210 15:41:34.314303 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.314352 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.314510 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t56gt\" (UniqueName: \"kubernetes.io/projected/597cc780-543b-4a5d-8ac8-f045c697740e-kube-api-access-t56gt\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.314575 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-config-data\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.314645 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-public-tls-certs\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.409012 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5609c38e-ed5f-46ca-916d-b5a4bad23acf" path="/var/lib/kubelet/pods/5609c38e-ed5f-46ca-916d-b5a4bad23acf/volumes" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.409699 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6a7966b-d089-4bfb-a276-8bea7cf4abf5" path="/var/lib/kubelet/pods/e6a7966b-d089-4bfb-a276-8bea7cf4abf5/volumes" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.416423 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t56gt\" (UniqueName: \"kubernetes.io/projected/597cc780-543b-4a5d-8ac8-f045c697740e-kube-api-access-t56gt\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.416474 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-config-data\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.416498 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-public-tls-certs\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.416588 4669 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597cc780-543b-4a5d-8ac8-f045c697740e-logs\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.416615 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.416669 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.417551 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597cc780-543b-4a5d-8ac8-f045c697740e-logs\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.421708 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-public-tls-certs\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.421866 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-internal-tls-certs\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.422958 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.425204 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-config-data\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.456037 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-dszds"] Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.460079 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.464792 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t56gt\" (UniqueName: \"kubernetes.io/projected/597cc780-543b-4a5d-8ac8-f045c697740e-kube-api-access-t56gt\") pod \"nova-api-0\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.465805 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.465943 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.472621 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dszds"] Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.535630 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-scripts\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.535765 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.535802 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-config-data\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.535848 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8n9q\" (UniqueName: \"kubernetes.io/projected/873dc098-d2be-4293-8179-167941e30e1e-kube-api-access-q8n9q\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.541393 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.637282 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.638274 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-config-data\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.638405 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8n9q\" (UniqueName: \"kubernetes.io/projected/873dc098-d2be-4293-8179-167941e30e1e-kube-api-access-q8n9q\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.638560 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-scripts\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.642031 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.644624 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-scripts\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.648166 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-config-data\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.660624 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8n9q\" (UniqueName: \"kubernetes.io/projected/873dc098-d2be-4293-8179-167941e30e1e-kube-api-access-q8n9q\") pod \"nova-cell1-cell-mapping-dszds\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:34 crc kubenswrapper[4669]: I1210 15:41:34.853618 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:35 crc kubenswrapper[4669]: I1210 15:41:35.057845 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:35 crc kubenswrapper[4669]: I1210 15:41:35.178111 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"597cc780-543b-4a5d-8ac8-f045c697740e","Type":"ContainerStarted","Data":"373c31cf36ab3277e28ec3d5ea42d345d18a46991b862438ba0953e542f85cf6"} Dec 10 15:41:35 crc kubenswrapper[4669]: I1210 15:41:35.180613 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38eb24f6-e94c-4469-8284-4e3e79ca1712","Type":"ContainerStarted","Data":"d6dae93f1775703433de108c6a4281d1f189c047b5719c17a458b55b704a75f7"} Dec 10 15:41:35 crc kubenswrapper[4669]: I1210 15:41:35.318566 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-dszds"] Dec 10 15:41:36 crc kubenswrapper[4669]: I1210 15:41:36.193673 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38eb24f6-e94c-4469-8284-4e3e79ca1712","Type":"ContainerStarted","Data":"2ef5a91faad1ed6ef0af2dfa70f9e7c04d7b7d13a3a1d56b40a611642b471164"} Dec 10 15:41:36 crc kubenswrapper[4669]: I1210 15:41:36.195740 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"597cc780-543b-4a5d-8ac8-f045c697740e","Type":"ContainerStarted","Data":"f1c90b7473436fc650ec3b30c75426907d85f692610a9ac65b0145fe673c0312"} Dec 10 15:41:36 crc kubenswrapper[4669]: I1210 15:41:36.195848 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"597cc780-543b-4a5d-8ac8-f045c697740e","Type":"ContainerStarted","Data":"eca24ed5dd0f3873052eb69d0b8e4cc761a34508cfc55a2be9681617e0942b73"} Dec 10 15:41:36 crc kubenswrapper[4669]: I1210 15:41:36.199163 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dszds" event={"ID":"873dc098-d2be-4293-8179-167941e30e1e","Type":"ContainerStarted","Data":"445fefb3ebf011927ebd7c50db2fd00f3e4999caf3058e5099800a3180eafb12"} Dec 10 15:41:36 crc kubenswrapper[4669]: I1210 15:41:36.199203 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dszds" event={"ID":"873dc098-d2be-4293-8179-167941e30e1e","Type":"ContainerStarted","Data":"e9209433652d7e28c66103afe0439b1a22761dd1fefc10b73a8f7668e3c9e2fd"} Dec 10 15:41:36 crc kubenswrapper[4669]: I1210 15:41:36.220864 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.220812331 podStartE2EDuration="2.220812331s" podCreationTimestamp="2025-12-10 15:41:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:36.218666118 +0000 UTC m=+1270.135612775" watchObservedRunningTime="2025-12-10 15:41:36.220812331 +0000 UTC m=+1270.137758978" Dec 10 15:41:36 crc kubenswrapper[4669]: I1210 15:41:36.250107 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-dszds" podStartSLOduration=2.250088436 podStartE2EDuration="2.250088436s" podCreationTimestamp="2025-12-10 15:41:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:36.244283983 +0000 UTC m=+1270.161230620" 
watchObservedRunningTime="2025-12-10 15:41:36.250088436 +0000 UTC m=+1270.167035063" Dec 10 15:41:37 crc kubenswrapper[4669]: I1210 15:41:37.211555 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38eb24f6-e94c-4469-8284-4e3e79ca1712","Type":"ContainerStarted","Data":"f730e1d2defcbe8790e879f41b67b4d75586d6526a0096f48f692e22f22f3bb8"} Dec 10 15:41:37 crc kubenswrapper[4669]: I1210 15:41:37.580097 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:41:37 crc kubenswrapper[4669]: I1210 15:41:37.655193 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-ngxq7"] Dec 10 15:41:37 crc kubenswrapper[4669]: I1210 15:41:37.655588 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" podUID="e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" containerName="dnsmasq-dns" containerID="cri-o://9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4" gracePeriod=10 Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.190434 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.228050 4669 generic.go:334] "Generic (PLEG): container finished" podID="e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" containerID="9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4" exitCode=0 Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.228091 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" event={"ID":"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780","Type":"ContainerDied","Data":"9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4"} Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.228117 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" event={"ID":"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780","Type":"ContainerDied","Data":"5a0c957155e0b6154c64d19413aac14430b55696b707610bbf0bc2a96e6ca593"} Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.228133 4669 scope.go:117] "RemoveContainer" containerID="9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.228313 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8b8cf6657-ngxq7" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.299327 4669 scope.go:117] "RemoveContainer" containerID="c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.320294 4669 scope.go:117] "RemoveContainer" containerID="9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4" Dec 10 15:41:38 crc kubenswrapper[4669]: E1210 15:41:38.320751 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4\": container with ID starting with 9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4 not found: ID does not exist" containerID="9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.320793 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4"} err="failed to get container status \"9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4\": rpc error: code = NotFound desc = could not find container \"9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4\": container with ID starting with 9ca9a8856b5724b323517e5bffd0a69b312ff032272843797340b47137b69bc4 not found: ID does not exist" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.320819 4669 scope.go:117] "RemoveContainer" containerID="c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d" Dec 10 15:41:38 crc kubenswrapper[4669]: E1210 15:41:38.321130 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d\": container with ID starting with c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d not found: ID does not exist" containerID="c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.321161 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d"} err="failed to get container status \"c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d\": rpc error: code = NotFound desc = could not find container \"c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d\": container with ID starting with c83d394df5d5e03c1e95e70e1b17016dfb2cdf43c56084da2177e5c2a109f68d not found: ID does not exist" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.325897 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-dns-svc\") pod \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.326016 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-sb\") pod \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.326101 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-nb\") pod \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.326235 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x66x\" (UniqueName: \"kubernetes.io/projected/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-kube-api-access-7x66x\") pod \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.326321 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-config\") pod \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\" (UID: \"e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780\") " Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.331251 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-kube-api-access-7x66x" (OuterVolumeSpecName: "kube-api-access-7x66x") pod "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" (UID: "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780"). InnerVolumeSpecName "kube-api-access-7x66x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.380127 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-config" (OuterVolumeSpecName: "config") pod "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" (UID: "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.381640 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" (UID: "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.385736 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" (UID: "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.386139 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" (UID: "e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.429998 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.430040 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.430054 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.430100 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x66x\" (UniqueName: \"kubernetes.io/projected/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-kube-api-access-7x66x\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.430113 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.552118 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-ngxq7"] Dec 10 15:41:38 crc kubenswrapper[4669]: I1210 15:41:38.563621 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8b8cf6657-ngxq7"] Dec 10 15:41:39 crc kubenswrapper[4669]: I1210 15:41:39.242305 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38eb24f6-e94c-4469-8284-4e3e79ca1712","Type":"ContainerStarted","Data":"7e8f6835d24475a48c6e4607d78cb8f8aed284d386b1e41870809745967f4e8f"} Dec 10 15:41:39 crc kubenswrapper[4669]: I1210 15:41:39.242461 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 10 15:41:39 crc kubenswrapper[4669]: I1210 15:41:39.265920 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.42240499 podStartE2EDuration="6.265905108s" podCreationTimestamp="2025-12-10 15:41:33 +0000 UTC" firstStartedPulling="2025-12-10 15:41:34.054350174 +0000 UTC m=+1267.971296801" lastFinishedPulling="2025-12-10 15:41:37.897850292 +0000 UTC m=+1271.814796919" observedRunningTime="2025-12-10 15:41:39.264114004 +0000 UTC m=+1273.181060631" watchObservedRunningTime="2025-12-10 15:41:39.265905108 +0000 UTC m=+1273.182851735" Dec 10 15:41:40 crc kubenswrapper[4669]: I1210 15:41:40.409987 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" path="/var/lib/kubelet/pods/e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780/volumes" Dec 10 15:41:41 crc kubenswrapper[4669]: I1210 15:41:41.260375 4669 generic.go:334] "Generic (PLEG): container finished" podID="873dc098-d2be-4293-8179-167941e30e1e" containerID="445fefb3ebf011927ebd7c50db2fd00f3e4999caf3058e5099800a3180eafb12" exitCode=0 Dec 10 15:41:41 crc kubenswrapper[4669]: I1210 15:41:41.260417 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dszds" 
event={"ID":"873dc098-d2be-4293-8179-167941e30e1e","Type":"ContainerDied","Data":"445fefb3ebf011927ebd7c50db2fd00f3e4999caf3058e5099800a3180eafb12"} Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.658257 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.830319 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8n9q\" (UniqueName: \"kubernetes.io/projected/873dc098-d2be-4293-8179-167941e30e1e-kube-api-access-q8n9q\") pod \"873dc098-d2be-4293-8179-167941e30e1e\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.830560 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-scripts\") pod \"873dc098-d2be-4293-8179-167941e30e1e\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.831776 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-combined-ca-bundle\") pod \"873dc098-d2be-4293-8179-167941e30e1e\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.831821 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-config-data\") pod \"873dc098-d2be-4293-8179-167941e30e1e\" (UID: \"873dc098-d2be-4293-8179-167941e30e1e\") " Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.836025 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-scripts" (OuterVolumeSpecName: "scripts") pod "873dc098-d2be-4293-8179-167941e30e1e" (UID: "873dc098-d2be-4293-8179-167941e30e1e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.850782 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/873dc098-d2be-4293-8179-167941e30e1e-kube-api-access-q8n9q" (OuterVolumeSpecName: "kube-api-access-q8n9q") pod "873dc098-d2be-4293-8179-167941e30e1e" (UID: "873dc098-d2be-4293-8179-167941e30e1e"). InnerVolumeSpecName "kube-api-access-q8n9q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.866137 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "873dc098-d2be-4293-8179-167941e30e1e" (UID: "873dc098-d2be-4293-8179-167941e30e1e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.868696 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-config-data" (OuterVolumeSpecName: "config-data") pod "873dc098-d2be-4293-8179-167941e30e1e" (UID: "873dc098-d2be-4293-8179-167941e30e1e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.934197 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8n9q\" (UniqueName: \"kubernetes.io/projected/873dc098-d2be-4293-8179-167941e30e1e-kube-api-access-q8n9q\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.934284 4669 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-scripts\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.934294 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:42 crc kubenswrapper[4669]: I1210 15:41:42.934302 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/873dc098-d2be-4293-8179-167941e30e1e-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.301046 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-dszds" event={"ID":"873dc098-d2be-4293-8179-167941e30e1e","Type":"ContainerDied","Data":"e9209433652d7e28c66103afe0439b1a22761dd1fefc10b73a8f7668e3c9e2fd"} Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.301093 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e9209433652d7e28c66103afe0439b1a22761dd1fefc10b73a8f7668e3c9e2fd" Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.301424 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-dszds" Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.488019 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.488524 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="597cc780-543b-4a5d-8ac8-f045c697740e" containerName="nova-api-log" containerID="cri-o://eca24ed5dd0f3873052eb69d0b8e4cc761a34508cfc55a2be9681617e0942b73" gracePeriod=30 Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.488654 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="597cc780-543b-4a5d-8ac8-f045c697740e" containerName="nova-api-api" containerID="cri-o://f1c90b7473436fc650ec3b30c75426907d85f692610a9ac65b0145fe673c0312" gracePeriod=30 Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.521583 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.521837 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="3c6f4c48-e20a-4cdd-a564-29bfe748af7d" containerName="nova-scheduler-scheduler" containerID="cri-o://dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03" gracePeriod=30 Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.549481 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.549742 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" 
containerName="nova-metadata-log" containerID="cri-o://48528e16997e1a73f4287b85dce77d7c03a6c812c4b23dffe16b2b642361b8d7" gracePeriod=30 Dec 10 15:41:43 crc kubenswrapper[4669]: I1210 15:41:43.550146 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-metadata" containerID="cri-o://37879abaca4b6ef6ca66e042310f69c40da482caa62ef5927dbeb3e7b4ed2ca2" gracePeriod=30 Dec 10 15:41:44 crc kubenswrapper[4669]: I1210 15:41:44.318206 4669 generic.go:334] "Generic (PLEG): container finished" podID="497fab74-8305-45ea-9de7-45b02f3efafb" containerID="48528e16997e1a73f4287b85dce77d7c03a6c812c4b23dffe16b2b642361b8d7" exitCode=143 Dec 10 15:41:44 crc kubenswrapper[4669]: I1210 15:41:44.318290 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"497fab74-8305-45ea-9de7-45b02f3efafb","Type":"ContainerDied","Data":"48528e16997e1a73f4287b85dce77d7c03a6c812c4b23dffe16b2b642361b8d7"} Dec 10 15:41:44 crc kubenswrapper[4669]: I1210 15:41:44.320334 4669 generic.go:334] "Generic (PLEG): container finished" podID="597cc780-543b-4a5d-8ac8-f045c697740e" containerID="f1c90b7473436fc650ec3b30c75426907d85f692610a9ac65b0145fe673c0312" exitCode=0 Dec 10 15:41:44 crc kubenswrapper[4669]: I1210 15:41:44.320360 4669 generic.go:334] "Generic (PLEG): container finished" podID="597cc780-543b-4a5d-8ac8-f045c697740e" containerID="eca24ed5dd0f3873052eb69d0b8e4cc761a34508cfc55a2be9681617e0942b73" exitCode=143 Dec 10 15:41:44 crc kubenswrapper[4669]: I1210 15:41:44.320380 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"597cc780-543b-4a5d-8ac8-f045c697740e","Type":"ContainerDied","Data":"f1c90b7473436fc650ec3b30c75426907d85f692610a9ac65b0145fe673c0312"} Dec 10 15:41:44 crc kubenswrapper[4669]: I1210 15:41:44.320403 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"597cc780-543b-4a5d-8ac8-f045c697740e","Type":"ContainerDied","Data":"eca24ed5dd0f3873052eb69d0b8e4cc761a34508cfc55a2be9681617e0942b73"} Dec 10 15:41:45 crc kubenswrapper[4669]: E1210 15:41:45.116600 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03 is running failed: container process not found" containerID="dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 15:41:45 crc kubenswrapper[4669]: E1210 15:41:45.119535 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03 is running failed: container process not found" containerID="dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 15:41:45 crc kubenswrapper[4669]: E1210 15:41:45.121168 4669 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03 is running failed: container process not found" containerID="dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 10 15:41:45 crc 
kubenswrapper[4669]: E1210 15:41:45.121205 4669 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="3c6f4c48-e20a-4cdd-a564-29bfe748af7d" containerName="nova-scheduler-scheduler" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.153700 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.281205 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t56gt\" (UniqueName: \"kubernetes.io/projected/597cc780-543b-4a5d-8ac8-f045c697740e-kube-api-access-t56gt\") pod \"597cc780-543b-4a5d-8ac8-f045c697740e\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.281365 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-config-data\") pod \"597cc780-543b-4a5d-8ac8-f045c697740e\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.281384 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-public-tls-certs\") pod \"597cc780-543b-4a5d-8ac8-f045c697740e\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.281416 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-combined-ca-bundle\") pod \"597cc780-543b-4a5d-8ac8-f045c697740e\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.281466 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597cc780-543b-4a5d-8ac8-f045c697740e-logs\") pod \"597cc780-543b-4a5d-8ac8-f045c697740e\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.281496 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-internal-tls-certs\") pod \"597cc780-543b-4a5d-8ac8-f045c697740e\" (UID: \"597cc780-543b-4a5d-8ac8-f045c697740e\") " Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.284115 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/597cc780-543b-4a5d-8ac8-f045c697740e-logs" (OuterVolumeSpecName: "logs") pod "597cc780-543b-4a5d-8ac8-f045c697740e" (UID: "597cc780-543b-4a5d-8ac8-f045c697740e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.303573 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/597cc780-543b-4a5d-8ac8-f045c697740e-kube-api-access-t56gt" (OuterVolumeSpecName: "kube-api-access-t56gt") pod "597cc780-543b-4a5d-8ac8-f045c697740e" (UID: "597cc780-543b-4a5d-8ac8-f045c697740e"). InnerVolumeSpecName "kube-api-access-t56gt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.311238 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-config-data" (OuterVolumeSpecName: "config-data") pod "597cc780-543b-4a5d-8ac8-f045c697740e" (UID: "597cc780-543b-4a5d-8ac8-f045c697740e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.323645 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "597cc780-543b-4a5d-8ac8-f045c697740e" (UID: "597cc780-543b-4a5d-8ac8-f045c697740e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.373158 4669 generic.go:334] "Generic (PLEG): container finished" podID="3c6f4c48-e20a-4cdd-a564-29bfe748af7d" containerID="dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03" exitCode=0 Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.373264 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c6f4c48-e20a-4cdd-a564-29bfe748af7d","Type":"ContainerDied","Data":"dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03"} Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.375623 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"597cc780-543b-4a5d-8ac8-f045c697740e","Type":"ContainerDied","Data":"373c31cf36ab3277e28ec3d5ea42d345d18a46991b862438ba0953e542f85cf6"} Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.375656 4669 scope.go:117] "RemoveContainer" containerID="f1c90b7473436fc650ec3b30c75426907d85f692610a9ac65b0145fe673c0312" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.375770 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.377362 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "597cc780-543b-4a5d-8ac8-f045c697740e" (UID: "597cc780-543b-4a5d-8ac8-f045c697740e"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.383181 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.383239 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.383254 4669 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/597cc780-543b-4a5d-8ac8-f045c697740e-logs\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.383265 4669 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.383276 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t56gt\" (UniqueName: \"kubernetes.io/projected/597cc780-543b-4a5d-8ac8-f045c697740e-kube-api-access-t56gt\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.392536 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "597cc780-543b-4a5d-8ac8-f045c697740e" (UID: "597cc780-543b-4a5d-8ac8-f045c697740e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.395902 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.423777 4669 scope.go:117] "RemoveContainer" containerID="eca24ed5dd0f3873052eb69d0b8e4cc761a34508cfc55a2be9681617e0942b73" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.486243 4669 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/597cc780-543b-4a5d-8ac8-f045c697740e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.587450 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-config-data\") pod \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.587495 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qb95\" (UniqueName: \"kubernetes.io/projected/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-kube-api-access-2qb95\") pod \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.587725 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-combined-ca-bundle\") pod \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\" (UID: \"3c6f4c48-e20a-4cdd-a564-29bfe748af7d\") " Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.590804 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-kube-api-access-2qb95" (OuterVolumeSpecName: "kube-api-access-2qb95") pod "3c6f4c48-e20a-4cdd-a564-29bfe748af7d" (UID: "3c6f4c48-e20a-4cdd-a564-29bfe748af7d"). InnerVolumeSpecName "kube-api-access-2qb95". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.618403 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3c6f4c48-e20a-4cdd-a564-29bfe748af7d" (UID: "3c6f4c48-e20a-4cdd-a564-29bfe748af7d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.619324 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-config-data" (OuterVolumeSpecName: "config-data") pod "3c6f4c48-e20a-4cdd-a564-29bfe748af7d" (UID: "3c6f4c48-e20a-4cdd-a564-29bfe748af7d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.689470 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.689513 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.689525 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qb95\" (UniqueName: \"kubernetes.io/projected/3c6f4c48-e20a-4cdd-a564-29bfe748af7d-kube-api-access-2qb95\") on node \"crc\" DevicePath \"\"" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.708297 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.717161 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.742813 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:45 crc kubenswrapper[4669]: E1210 15:41:45.743354 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" containerName="dnsmasq-dns" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.743431 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" containerName="dnsmasq-dns" Dec 10 15:41:45 crc kubenswrapper[4669]: E1210 15:41:45.743490 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="873dc098-d2be-4293-8179-167941e30e1e" containerName="nova-manage" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.743540 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="873dc098-d2be-4293-8179-167941e30e1e" containerName="nova-manage" Dec 10 15:41:45 crc kubenswrapper[4669]: E1210 15:41:45.743600 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" containerName="init" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.743684 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" containerName="init" Dec 10 15:41:45 crc kubenswrapper[4669]: E1210 15:41:45.743747 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="597cc780-543b-4a5d-8ac8-f045c697740e" containerName="nova-api-log" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.743796 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="597cc780-543b-4a5d-8ac8-f045c697740e" containerName="nova-api-log" Dec 10 15:41:45 crc kubenswrapper[4669]: E1210 15:41:45.743855 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c6f4c48-e20a-4cdd-a564-29bfe748af7d" containerName="nova-scheduler-scheduler" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.743903 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c6f4c48-e20a-4cdd-a564-29bfe748af7d" containerName="nova-scheduler-scheduler" Dec 10 15:41:45 crc kubenswrapper[4669]: E1210 15:41:45.743960 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="597cc780-543b-4a5d-8ac8-f045c697740e" containerName="nova-api-api" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.744021 4669 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="597cc780-543b-4a5d-8ac8-f045c697740e" containerName="nova-api-api" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.744269 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="597cc780-543b-4a5d-8ac8-f045c697740e" containerName="nova-api-api" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.744357 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3d0a3eb-59bd-4d46-a0da-c3eb8b52b780" containerName="dnsmasq-dns" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.744444 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="873dc098-d2be-4293-8179-167941e30e1e" containerName="nova-manage" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.744512 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="597cc780-543b-4a5d-8ac8-f045c697740e" containerName="nova-api-log" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.744566 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c6f4c48-e20a-4cdd-a564-29bfe748af7d" containerName="nova-scheduler-scheduler" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.745754 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.747859 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.748905 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.749140 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.756431 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.918416 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28r7r\" (UniqueName: \"kubernetes.io/projected/5dfb396a-215c-4b54-91ec-eff70e79dd70-kube-api-access-28r7r\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.918761 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dfb396a-215c-4b54-91ec-eff70e79dd70-logs\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.918847 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.918922 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.918970 4669 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-config-data\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:45 crc kubenswrapper[4669]: I1210 15:41:45.920805 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-public-tls-certs\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.022527 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28r7r\" (UniqueName: \"kubernetes.io/projected/5dfb396a-215c-4b54-91ec-eff70e79dd70-kube-api-access-28r7r\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.022841 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dfb396a-215c-4b54-91ec-eff70e79dd70-logs\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.023026 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.023923 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.024054 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-config-data\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.024234 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-public-tls-certs\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.023140 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5dfb396a-215c-4b54-91ec-eff70e79dd70-logs\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.027243 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-public-tls-certs\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.027674 4669 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.028038 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.029243 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5dfb396a-215c-4b54-91ec-eff70e79dd70-config-data\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.042499 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28r7r\" (UniqueName: \"kubernetes.io/projected/5dfb396a-215c-4b54-91ec-eff70e79dd70-kube-api-access-28r7r\") pod \"nova-api-0\" (UID: \"5dfb396a-215c-4b54-91ec-eff70e79dd70\") " pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.062024 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.387182 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3c6f4c48-e20a-4cdd-a564-29bfe748af7d","Type":"ContainerDied","Data":"a5969c3c1c86de0176873e1b62eaf3f86fb4e8057b58f3cf65992b6e121eab8f"} Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.387657 4669 scope.go:117] "RemoveContainer" containerID="dc701dd047b4c80e519d7118072793d209c7b183056d13565a51c8961e587a03" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.387355 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.416276 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="597cc780-543b-4a5d-8ac8-f045c697740e" path="/var/lib/kubelet/pods/597cc780-543b-4a5d-8ac8-f045c697740e/volumes" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.437375 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.443687 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.464153 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.465681 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.468964 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.487112 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.533937 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2-config-data\") pod \"nova-scheduler-0\" (UID: \"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.534036 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj4mf\" (UniqueName: \"kubernetes.io/projected/ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2-kube-api-access-dj4mf\") pod \"nova-scheduler-0\" (UID: \"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.534088 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.579738 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.636672 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2-config-data\") pod \"nova-scheduler-0\" (UID: \"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.636757 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj4mf\" (UniqueName: \"kubernetes.io/projected/ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2-kube-api-access-dj4mf\") pod \"nova-scheduler-0\" (UID: \"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.636804 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.643051 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2-config-data\") pod \"nova-scheduler-0\" (UID: \"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.644300 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 
15:41:46.660519 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj4mf\" (UniqueName: \"kubernetes.io/projected/ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2-kube-api-access-dj4mf\") pod \"nova-scheduler-0\" (UID: \"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2\") " pod="openstack/nova-scheduler-0" Dec 10 15:41:46 crc kubenswrapper[4669]: I1210 15:41:46.787201 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.021283 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.175:8775/\": read tcp 10.217.0.2:48034->10.217.0.175:8775: read: connection reset by peer" Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.021284 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.175:8775/\": read tcp 10.217.0.2:48036->10.217.0.175:8775: read: connection reset by peer" Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.267834 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.403722 4669 generic.go:334] "Generic (PLEG): container finished" podID="497fab74-8305-45ea-9de7-45b02f3efafb" containerID="37879abaca4b6ef6ca66e042310f69c40da482caa62ef5927dbeb3e7b4ed2ca2" exitCode=0 Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.403786 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"497fab74-8305-45ea-9de7-45b02f3efafb","Type":"ContainerDied","Data":"37879abaca4b6ef6ca66e042310f69c40da482caa62ef5927dbeb3e7b4ed2ca2"} Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.406530 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5dfb396a-215c-4b54-91ec-eff70e79dd70","Type":"ContainerStarted","Data":"d05ba47a8b15e9a44ea92e7fe488bbc378f2f2923ed71dbefe936fa4a34a4366"} Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.406560 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5dfb396a-215c-4b54-91ec-eff70e79dd70","Type":"ContainerStarted","Data":"b2051d2d08a95c8aa87b40c05b176c472fe2ad7a525be442edc564aa04681254"} Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.406571 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5dfb396a-215c-4b54-91ec-eff70e79dd70","Type":"ContainerStarted","Data":"b3f245c0374ffca9feac9f7da2f13dcdf887d08eba0145e5353f4d4451d8c547"} Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.412774 4669 util.go:48] "No ready sandbox for pod can be found. 
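
The two readiness failures for nova-metadata-0 above are ordinary HTTP GETs whose TCP connection the dying server reset mid-read. Stripped of the kubelet machinery, such a probe is just a short-timeout GET whose error text is reported verbatim. A self-contained sketch follows; the port is a stand-in, and the run assumes nothing listens on it locally, which yields "connection refused" rather than the log's "connection reset by peer", the variant seen when a server accepts the connection and then aborts it while shutting down.

// A probe-style GET against a dead endpoint; the error string is surfaced
// exactly as the prober.go lines above show in their output= field.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func main() {
	client := &http.Client{Timeout: 1 * time.Second}
	// 8775 matches the nova-metadata port in the log; assumed closed here.
	resp, err := client.Get("http://127.0.0.1:8775/")
	if err != nil {
		fmt.Println("probe failed:", err) // e.g. "... connect: connection refused"
		return
	}
	defer resp.Body.Close()
	fmt.Println("probe ok:", resp.Status)
}
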
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.415177 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2","Type":"ContainerStarted","Data":"618cb4392dc2b1bb8d6a0de3516f51d3f23c34f7459a3ed2b67fc36ae285c6d3"}
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.439599 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.439579333 podStartE2EDuration="2.439579333s" podCreationTimestamp="2025-12-10 15:41:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:47.431804429 +0000 UTC m=+1281.348751066" watchObservedRunningTime="2025-12-10 15:41:47.439579333 +0000 UTC m=+1281.356525960"
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.550829 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6pt87\" (UniqueName: \"kubernetes.io/projected/497fab74-8305-45ea-9de7-45b02f3efafb-kube-api-access-6pt87\") pod \"497fab74-8305-45ea-9de7-45b02f3efafb\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") "
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.551661 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-combined-ca-bundle\") pod \"497fab74-8305-45ea-9de7-45b02f3efafb\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") "
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.551863 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-config-data\") pod \"497fab74-8305-45ea-9de7-45b02f3efafb\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") "
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.551968 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-nova-metadata-tls-certs\") pod \"497fab74-8305-45ea-9de7-45b02f3efafb\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") "
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.552120 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/497fab74-8305-45ea-9de7-45b02f3efafb-logs\") pod \"497fab74-8305-45ea-9de7-45b02f3efafb\" (UID: \"497fab74-8305-45ea-9de7-45b02f3efafb\") "
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.554483 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/497fab74-8305-45ea-9de7-45b02f3efafb-logs" (OuterVolumeSpecName: "logs") pod "497fab74-8305-45ea-9de7-45b02f3efafb" (UID: "497fab74-8305-45ea-9de7-45b02f3efafb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.563383 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/497fab74-8305-45ea-9de7-45b02f3efafb-kube-api-access-6pt87" (OuterVolumeSpecName: "kube-api-access-6pt87") pod "497fab74-8305-45ea-9de7-45b02f3efafb" (UID: "497fab74-8305-45ea-9de7-45b02f3efafb"). InnerVolumeSpecName "kube-api-access-6pt87". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.632465 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "497fab74-8305-45ea-9de7-45b02f3efafb" (UID: "497fab74-8305-45ea-9de7-45b02f3efafb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.671718 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6pt87\" (UniqueName: \"kubernetes.io/projected/497fab74-8305-45ea-9de7-45b02f3efafb-kube-api-access-6pt87\") on node \"crc\" DevicePath \"\""
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.671754 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.671763 4669 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/497fab74-8305-45ea-9de7-45b02f3efafb-logs\") on node \"crc\" DevicePath \"\""
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.724433 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-config-data" (OuterVolumeSpecName: "config-data") pod "497fab74-8305-45ea-9de7-45b02f3efafb" (UID: "497fab74-8305-45ea-9de7-45b02f3efafb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.747371 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "497fab74-8305-45ea-9de7-45b02f3efafb" (UID: "497fab74-8305-45ea-9de7-45b02f3efafb"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.773281 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 15:41:47 crc kubenswrapper[4669]: I1210 15:41:47.773317 4669 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/497fab74-8305-45ea-9de7-45b02f3efafb-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.410918 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c6f4c48-e20a-4cdd-a564-29bfe748af7d" path="/var/lib/kubelet/pods/3c6f4c48-e20a-4cdd-a564-29bfe748af7d/volumes"
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.424845 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2","Type":"ContainerStarted","Data":"fdc1742326732a8d30accea90379e82df3a35961fef9e0b28f9c25346512b5c2"}
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.430831 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"497fab74-8305-45ea-9de7-45b02f3efafb","Type":"ContainerDied","Data":"e401a76a88d85c25832e5b44cc19c7bf6e3dc955aca2225ba5146cc84ebb9456"}
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.430894 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.430926 4669 scope.go:117] "RemoveContainer" containerID="37879abaca4b6ef6ca66e042310f69c40da482caa62ef5927dbeb3e7b4ed2ca2"
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.474706 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.474684579 podStartE2EDuration="2.474684579s" podCreationTimestamp="2025-12-10 15:41:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:48.447811003 +0000 UTC m=+1282.364757650" watchObservedRunningTime="2025-12-10 15:41:48.474684579 +0000 UTC m=+1282.391631206"
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.487270 4669 scope.go:117] "RemoveContainer" containerID="48528e16997e1a73f4287b85dce77d7c03a6c812c4b23dffe16b2b642361b8d7"
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.520616 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.555262 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.555319 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 10 15:41:48 crc kubenswrapper[4669]: E1210 15:41:48.555769 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-log"
Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.555783 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-log"
Dec 10 15:41:48 crc kubenswrapper[4669]: E1210 15:41:48.555794 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-metadata"
podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-metadata" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.555800 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-metadata" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.555974 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-metadata" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.556001 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" containerName="nova-metadata-log" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.556917 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.560801 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.560924 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.588632 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.695719 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-logs\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.696293 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxnxd\" (UniqueName: \"kubernetes.io/projected/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-kube-api-access-bxnxd\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.696499 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.696629 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-config-data\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.696761 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.799166 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-combined-ca-bundle\") pod 
\"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.799678 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-logs\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.799830 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxnxd\" (UniqueName: \"kubernetes.io/projected/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-kube-api-access-bxnxd\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.800882 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.800950 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-config-data\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.801618 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-logs\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.805292 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.805902 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.806602 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-config-data\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.837460 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxnxd\" (UniqueName: \"kubernetes.io/projected/9d7aa3a5-9d54-482c-b662-c1b60dcb7b30-kube-api-access-bxnxd\") pod \"nova-metadata-0\" (UID: \"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30\") " pod="openstack/nova-metadata-0" Dec 10 15:41:48 crc kubenswrapper[4669]: I1210 15:41:48.889324 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 10 15:41:49 crc kubenswrapper[4669]: I1210 15:41:49.385688 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 10 15:41:49 crc kubenswrapper[4669]: I1210 15:41:49.447311 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30","Type":"ContainerStarted","Data":"945bb7490c48944e34495884826fc50620d77c2b323509dfc7c958d000a8f5ae"} Dec 10 15:41:50 crc kubenswrapper[4669]: I1210 15:41:50.412907 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="497fab74-8305-45ea-9de7-45b02f3efafb" path="/var/lib/kubelet/pods/497fab74-8305-45ea-9de7-45b02f3efafb/volumes" Dec 10 15:41:50 crc kubenswrapper[4669]: I1210 15:41:50.468728 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30","Type":"ContainerStarted","Data":"fe0484dd249b280da98bc1f0c0749bfa298817a84168215b8e1a0756ca03bd23"} Dec 10 15:41:50 crc kubenswrapper[4669]: I1210 15:41:50.468780 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"9d7aa3a5-9d54-482c-b662-c1b60dcb7b30","Type":"ContainerStarted","Data":"3718fd6d4ba715636c3985934e797dcedadb425cce96301708728bb5bb6a57aa"} Dec 10 15:41:50 crc kubenswrapper[4669]: I1210 15:41:50.511175 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.511131375 podStartE2EDuration="2.511131375s" podCreationTimestamp="2025-12-10 15:41:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:41:50.495760364 +0000 UTC m=+1284.412707061" watchObservedRunningTime="2025-12-10 15:41:50.511131375 +0000 UTC m=+1284.428078022" Dec 10 15:41:51 crc kubenswrapper[4669]: I1210 15:41:51.788189 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 10 15:41:53 crc kubenswrapper[4669]: I1210 15:41:53.890426 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 15:41:53 crc kubenswrapper[4669]: I1210 15:41:53.892457 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 10 15:41:56 crc kubenswrapper[4669]: I1210 15:41:56.062272 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 15:41:56 crc kubenswrapper[4669]: I1210 15:41:56.062595 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 10 15:41:56 crc kubenswrapper[4669]: I1210 15:41:56.788407 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 10 15:41:56 crc kubenswrapper[4669]: I1210 15:41:56.819012 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 10 15:41:57 crc kubenswrapper[4669]: I1210 15:41:57.078520 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="5dfb396a-215c-4b54-91ec-eff70e79dd70" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.186:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 15:41:57 crc kubenswrapper[4669]: I1210 15:41:57.078495 4669 prober.go:107] "Probe failed" 
probeType="Startup" pod="openstack/nova-api-0" podUID="5dfb396a-215c-4b54-91ec-eff70e79dd70" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.186:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 15:41:57 crc kubenswrapper[4669]: I1210 15:41:57.569055 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 10 15:41:58 crc kubenswrapper[4669]: I1210 15:41:58.890573 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 15:41:58 crc kubenswrapper[4669]: I1210 15:41:58.890813 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 10 15:41:59 crc kubenswrapper[4669]: I1210 15:41:59.911423 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="9d7aa3a5-9d54-482c-b662-c1b60dcb7b30" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.188:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 10 15:41:59 crc kubenswrapper[4669]: I1210 15:41:59.912548 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="9d7aa3a5-9d54-482c-b662-c1b60dcb7b30" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.188:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 10 15:42:03 crc kubenswrapper[4669]: I1210 15:42:03.532796 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 10 15:42:06 crc kubenswrapper[4669]: I1210 15:42:06.075651 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 15:42:06 crc kubenswrapper[4669]: I1210 15:42:06.076763 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 15:42:06 crc kubenswrapper[4669]: I1210 15:42:06.077540 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 10 15:42:06 crc kubenswrapper[4669]: I1210 15:42:06.088772 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 15:42:06 crc kubenswrapper[4669]: I1210 15:42:06.635321 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 10 15:42:06 crc kubenswrapper[4669]: I1210 15:42:06.640811 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 10 15:42:08 crc kubenswrapper[4669]: I1210 15:42:08.895540 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 15:42:08 crc kubenswrapper[4669]: I1210 15:42:08.896302 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 10 15:42:08 crc kubenswrapper[4669]: I1210 15:42:08.905582 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 15:42:09 crc kubenswrapper[4669]: I1210 15:42:09.678916 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 10 15:42:18 crc kubenswrapper[4669]: I1210 15:42:18.200785 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 15:42:19 crc kubenswrapper[4669]: I1210 
Dec 10 15:42:22 crc kubenswrapper[4669]: I1210 15:42:22.597180 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="b359c954-51b4-401c-a783-f0220d650a4b" containerName="rabbitmq" containerID="cri-o://7827653ca7ec12642be72fd58ebf8511ed5d0a419bc8cc09d930e860a83e9513" gracePeriod=604796
Dec 10 15:42:24 crc kubenswrapper[4669]: I1210 15:42:24.476727 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" containerName="rabbitmq" containerID="cri-o://a78364c470182cf19c6d95b01f8816089a6bc59167765da5cec24e56ca46dcb6" gracePeriod=604796
Dec 10 15:42:28 crc kubenswrapper[4669]: I1210 15:42:28.235323 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="b359c954-51b4-401c-a783-f0220d650a4b" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused"
Dec 10 15:42:28 crc kubenswrapper[4669]: I1210 15:42:28.653448 4669 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused"
Dec 10 15:42:28 crc kubenswrapper[4669]: I1210 15:42:28.850046 4669 generic.go:334] "Generic (PLEG): container finished" podID="b359c954-51b4-401c-a783-f0220d650a4b" containerID="7827653ca7ec12642be72fd58ebf8511ed5d0a419bc8cc09d930e860a83e9513" exitCode=0
Dec 10 15:42:28 crc kubenswrapper[4669]: I1210 15:42:28.850148 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b359c954-51b4-401c-a783-f0220d650a4b","Type":"ContainerDied","Data":"7827653ca7ec12642be72fd58ebf8511ed5d0a419bc8cc09d930e860a83e9513"}
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.675027 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
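
gracePeriod=604796 in the two "Killing container with a grace period" lines is best read as a terminationGracePeriodSeconds of 604800 (7 days, a figure inferred from the arithmetic rather than stated in the log) minus the roughly four seconds that elapsed between each pod's SyncLoop DELETE and the corresponding kill. Checking that for rabbitmq-server-0 from the logged timestamps:

// Remaining grace period for rabbitmq-server-0: spec minus elapsed seconds
// between DELETE (15:42:18.200785) and kill (15:42:22.597180). The 604800
// spec value is an assumption consistent with the log, not taken from it.
package main

import (
	"fmt"
	"time"
)

func main() {
	deleted, _ := time.Parse(time.RFC3339Nano, "2025-12-10T15:42:18.200785Z")
	killed, _ := time.Parse(time.RFC3339Nano, "2025-12-10T15:42:22.597180Z")
	const spec = 604800 * time.Second // assumed terminationGracePeriodSeconds
	remaining := spec - killed.Sub(deleted).Truncate(time.Second)
	fmt.Println(int(remaining.Seconds())) // 604796, matching the logged gracePeriod
}
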
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735305 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-erlang-cookie\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735632 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fvq6\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-kube-api-access-6fvq6\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735685 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-server-conf\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735720 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b359c954-51b4-401c-a783-f0220d650a4b-erlang-cookie-secret\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735755 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735809 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-tls\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735865 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-confd\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735902 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-plugins-conf\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735881 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735921 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-config-data\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.735997 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b359c954-51b4-401c-a783-f0220d650a4b-pod-info\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.736087 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-plugins\") pod \"b359c954-51b4-401c-a783-f0220d650a4b\" (UID: \"b359c954-51b4-401c-a783-f0220d650a4b\") "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.736945 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.737627 4669 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.737647 4669 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.746180 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-kube-api-access-6fvq6" (OuterVolumeSpecName: "kube-api-access-6fvq6") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "kube-api-access-6fvq6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.749354 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.751058 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b359c954-51b4-401c-a783-f0220d650a4b-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.751292 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.753538 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/b359c954-51b4-401c-a783-f0220d650a4b-pod-info" (OuterVolumeSpecName: "pod-info") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.767263 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.779610 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-config-data" (OuterVolumeSpecName: "config-data") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.838572 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-server-conf" (OuterVolumeSpecName: "server-conf") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.846451 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fvq6\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-kube-api-access-6fvq6\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.846484 4669 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-server-conf\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.846493 4669 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b359c954-51b4-401c-a783-f0220d650a4b-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.846533 4669 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.846543 4669 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.846551 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b359c954-51b4-401c-a783-f0220d650a4b-config-data\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.846559 4669 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b359c954-51b4-401c-a783-f0220d650a4b-pod-info\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.846567 4669 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.866268 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"b359c954-51b4-401c-a783-f0220d650a4b","Type":"ContainerDied","Data":"0057ab444d353c6d2c73fdcfcfcc99b8de2597e058a409108b9a86254ffd5254"}
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.866353 4669 scope.go:117] "RemoveContainer" containerID="7827653ca7ec12642be72fd58ebf8511ed5d0a419bc8cc09d930e860a83e9513"
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.866463 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.886499 4669 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.918071 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "b359c954-51b4-401c-a783-f0220d650a4b" (UID: "b359c954-51b4-401c-a783-f0220d650a4b"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.948618 4669 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:29 crc kubenswrapper[4669]: I1210 15:42:29.948651 4669 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b359c954-51b4-401c-a783-f0220d650a4b-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.084116 4669 scope.go:117] "RemoveContainer" containerID="f70abc287368477df634eb5a5310c6298fb195497f7e904832ecee389e85ffea"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.207289 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.221734 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.246692 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 10 15:42:30 crc kubenswrapper[4669]: E1210 15:42:30.247110 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b359c954-51b4-401c-a783-f0220d650a4b" containerName="rabbitmq"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.247128 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="b359c954-51b4-401c-a783-f0220d650a4b" containerName="rabbitmq"
Dec 10 15:42:30 crc kubenswrapper[4669]: E1210 15:42:30.247150 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b359c954-51b4-401c-a783-f0220d650a4b" containerName="setup-container"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.247156 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="b359c954-51b4-401c-a783-f0220d650a4b" containerName="setup-container"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.247411 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="b359c954-51b4-401c-a783-f0220d650a4b" containerName="rabbitmq"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.248824 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.256699 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.256897 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.257010 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.257176 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.257312 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.257385 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.257413 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-wcsm7"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.350073 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355134 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/61575229-df2e-466a-858a-02d9fa0c1e79-server-conf\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355242 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355261 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd9rb\" (UniqueName: \"kubernetes.io/projected/61575229-df2e-466a-858a-02d9fa0c1e79-kube-api-access-wd9rb\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355285 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/61575229-df2e-466a-858a-02d9fa0c1e79-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355304 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/61575229-df2e-466a-858a-02d9fa0c1e79-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0"
Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355328 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName:
\"kubernetes.io/empty-dir/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355354 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355401 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61575229-df2e-466a-858a-02d9fa0c1e79-config-data\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355434 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/61575229-df2e-466a-858a-02d9fa0c1e79-pod-info\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355453 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.355471 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.408471 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b359c954-51b4-401c-a783-f0220d650a4b" path="/var/lib/kubelet/pods/b359c954-51b4-401c-a783-f0220d650a4b/volumes" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.458671 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/61575229-df2e-466a-858a-02d9fa0c1e79-pod-info\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.458945 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.459026 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.459127 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/61575229-df2e-466a-858a-02d9fa0c1e79-server-conf\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.459310 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.459401 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd9rb\" (UniqueName: \"kubernetes.io/projected/61575229-df2e-466a-858a-02d9fa0c1e79-kube-api-access-wd9rb\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.459485 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/61575229-df2e-466a-858a-02d9fa0c1e79-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.459570 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/61575229-df2e-466a-858a-02d9fa0c1e79-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.459665 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.459752 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.459914 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61575229-df2e-466a-858a-02d9fa0c1e79-config-data\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.460941 4669 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.466195 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/61575229-df2e-466a-858a-02d9fa0c1e79-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 
crc kubenswrapper[4669]: I1210 15:42:30.467128 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/61575229-df2e-466a-858a-02d9fa0c1e79-server-conf\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.468787 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.461049 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.469437 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61575229-df2e-466a-858a-02d9fa0c1e79-config-data\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.475861 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.477954 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/61575229-df2e-466a-858a-02d9fa0c1e79-pod-info\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.482139 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/61575229-df2e-466a-858a-02d9fa0c1e79-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.484493 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd9rb\" (UniqueName: \"kubernetes.io/projected/61575229-df2e-466a-858a-02d9fa0c1e79-kube-api-access-wd9rb\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.486086 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/61575229-df2e-466a-858a-02d9fa0c1e79-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.530179 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"61575229-df2e-466a-858a-02d9fa0c1e79\") " 
pod="openstack/rabbitmq-server-0" Dec 10 15:42:30 crc kubenswrapper[4669]: I1210 15:42:30.571570 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 10 15:42:31 crc kubenswrapper[4669]: I1210 15:42:31.033239 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 10 15:42:31 crc kubenswrapper[4669]: I1210 15:42:31.886119 4669 generic.go:334] "Generic (PLEG): container finished" podID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" containerID="a78364c470182cf19c6d95b01f8816089a6bc59167765da5cec24e56ca46dcb6" exitCode=0 Dec 10 15:42:31 crc kubenswrapper[4669]: I1210 15:42:31.886283 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7714ed30-3730-4a63-8d4d-2b7e097cadbc","Type":"ContainerDied","Data":"a78364c470182cf19c6d95b01f8816089a6bc59167765da5cec24e56ca46dcb6"} Dec 10 15:42:31 crc kubenswrapper[4669]: I1210 15:42:31.887479 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"61575229-df2e-466a-858a-02d9fa0c1e79","Type":"ContainerStarted","Data":"e7a49ef06df7bb4355cfdbfdd7eaeb46a0e7a47f48e162181eda86fa8fe8d83b"} Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.385761 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397473 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-erlang-cookie\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397520 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qx565\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-kube-api-access-qx565\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397612 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-config-data\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397632 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-plugins-conf\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397650 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-confd\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397687 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-tls\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " 
Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397721 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-server-conf\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397752 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-plugins\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397787 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397808 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7714ed30-3730-4a63-8d4d-2b7e097cadbc-pod-info\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397828 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7714ed30-3730-4a63-8d4d-2b7e097cadbc-erlang-cookie-secret\") pod \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\" (UID: \"7714ed30-3730-4a63-8d4d-2b7e097cadbc\") " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.397937 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.398615 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.398806 4669 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.400046 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.424616 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-kube-api-access-qx565" (OuterVolumeSpecName: "kube-api-access-qx565") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "kube-api-access-qx565". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.430620 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.435494 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7714ed30-3730-4a63-8d4d-2b7e097cadbc-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.468359 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/7714ed30-3730-4a63-8d4d-2b7e097cadbc-pod-info" (OuterVolumeSpecName: "pod-info") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.478401 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.485827 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-config-data" (OuterVolumeSpecName: "config-data") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.500033 4669 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.500073 4669 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.500083 4669 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7714ed30-3730-4a63-8d4d-2b7e097cadbc-pod-info\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.500091 4669 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7714ed30-3730-4a63-8d4d-2b7e097cadbc-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.500102 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qx565\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-kube-api-access-qx565\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.500112 4669 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.500121 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.500130 4669 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.559949 4669 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.595325 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-server-conf" (OuterVolumeSpecName: "server-conf") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.605696 4669 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7714ed30-3730-4a63-8d4d-2b7e097cadbc-server-conf\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.605728 4669 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.650089 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "7714ed30-3730-4a63-8d4d-2b7e097cadbc" (UID: "7714ed30-3730-4a63-8d4d-2b7e097cadbc"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.707211 4669 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7714ed30-3730-4a63-8d4d-2b7e097cadbc-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.911826 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"7714ed30-3730-4a63-8d4d-2b7e097cadbc","Type":"ContainerDied","Data":"916bc480292e92185f21c1c0aefa4c60cb12aa00d2e694dfa64161240b1abd88"} Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.911893 4669 scope.go:117] "RemoveContainer" containerID="a78364c470182cf19c6d95b01f8816089a6bc59167765da5cec24e56ca46dcb6" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.912031 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.915339 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"61575229-df2e-466a-858a-02d9fa0c1e79","Type":"ContainerStarted","Data":"2ae0cff9d3545f11dee584b2263782ac14b757814744b68630466d9b1e1848fd"} Dec 10 15:42:32 crc kubenswrapper[4669]: I1210 15:42:32.999011 4669 scope.go:117] "RemoveContainer" containerID="f1101581d3a1607b98d6dbff73d1eae1d9b3c53a86cade61351b25b3c4420773" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.006168 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.025155 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.089130 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 15:42:33 crc kubenswrapper[4669]: E1210 15:42:33.089850 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" containerName="setup-container" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.089980 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" containerName="setup-container" Dec 10 15:42:33 crc kubenswrapper[4669]: E1210 15:42:33.090062 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" containerName="rabbitmq" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.090159 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" containerName="rabbitmq" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.090527 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" containerName="rabbitmq" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.091809 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.095573 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.095756 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.095846 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.095940 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-6jbw2" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.103479 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.103727 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.103919 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.137174 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216079 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216128 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fe16c440-f893-46cb-b038-536568c85b5b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216187 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmhmq\" (UniqueName: \"kubernetes.io/projected/fe16c440-f893-46cb-b038-536568c85b5b-kube-api-access-kmhmq\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216327 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216471 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fe16c440-f893-46cb-b038-536568c85b5b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216573 4669 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216669 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fe16c440-f893-46cb-b038-536568c85b5b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216711 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216768 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fe16c440-f893-46cb-b038-536568c85b5b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.216945 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fe16c440-f893-46cb-b038-536568c85b5b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.217027 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319116 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319200 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fe16c440-f893-46cb-b038-536568c85b5b-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319250 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319288 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fe16c440-f893-46cb-b038-536568c85b5b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319346 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fe16c440-f893-46cb-b038-536568c85b5b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319377 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319413 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319436 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fe16c440-f893-46cb-b038-536568c85b5b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319482 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmhmq\" (UniqueName: \"kubernetes.io/projected/fe16c440-f893-46cb-b038-536568c85b5b-kube-api-access-kmhmq\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319515 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319555 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fe16c440-f893-46cb-b038-536568c85b5b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.319948 4669 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.320711 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/fe16c440-f893-46cb-b038-536568c85b5b-server-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.320757 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/fe16c440-f893-46cb-b038-536568c85b5b-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.321137 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.321442 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.322021 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/fe16c440-f893-46cb-b038-536568c85b5b-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.323190 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/fe16c440-f893-46cb-b038-536568c85b5b-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.323558 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/fe16c440-f893-46cb-b038-536568c85b5b-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.324160 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.338953 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/fe16c440-f893-46cb-b038-536568c85b5b-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.341530 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmhmq\" (UniqueName: \"kubernetes.io/projected/fe16c440-f893-46cb-b038-536568c85b5b-kube-api-access-kmhmq\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.354766 4669 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"fe16c440-f893-46cb-b038-536568c85b5b\") " pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.408191 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:42:33 crc kubenswrapper[4669]: I1210 15:42:33.917063 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 10 15:42:33 crc kubenswrapper[4669]: W1210 15:42:33.917983 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe16c440_f893_46cb_b038_536568c85b5b.slice/crio-4ddcaf403d4502cb9ad337a50fbaa3111caa19483eb856adf7f1a86771eab6b6 WatchSource:0}: Error finding container 4ddcaf403d4502cb9ad337a50fbaa3111caa19483eb856adf7f1a86771eab6b6: Status 404 returned error can't find the container with id 4ddcaf403d4502cb9ad337a50fbaa3111caa19483eb856adf7f1a86771eab6b6 Dec 10 15:42:34 crc kubenswrapper[4669]: I1210 15:42:34.413745 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7714ed30-3730-4a63-8d4d-2b7e097cadbc" path="/var/lib/kubelet/pods/7714ed30-3730-4a63-8d4d-2b7e097cadbc/volumes" Dec 10 15:42:34 crc kubenswrapper[4669]: I1210 15:42:34.939985 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"fe16c440-f893-46cb-b038-536568c85b5b","Type":"ContainerStarted","Data":"4ddcaf403d4502cb9ad337a50fbaa3111caa19483eb856adf7f1a86771eab6b6"} Dec 10 15:42:35 crc kubenswrapper[4669]: I1210 15:42:35.966258 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"fe16c440-f893-46cb-b038-536568c85b5b","Type":"ContainerStarted","Data":"22e615fe22fb1ac570f27d471d16171cf8ea2327ad08919b712536466e4450bc"} Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.046840 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-2629b"] Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.048319 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.049843 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.060652 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-2629b"] Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.173961 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.174001 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pksb\" (UniqueName: \"kubernetes.io/projected/65d25955-6686-4721-879f-5362213ba467-kube-api-access-9pksb\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.174060 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-config\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.174093 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.174155 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.174175 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-dns-svc\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.276385 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.276437 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-dns-svc\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: 
\"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.276549 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.276574 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pksb\" (UniqueName: \"kubernetes.io/projected/65d25955-6686-4721-879f-5362213ba467-kube-api-access-9pksb\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.276703 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-config\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.276766 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.277671 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-config\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.277777 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-dns-svc\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.277829 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-openstack-edpm-ipam\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.278026 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-nb\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.278436 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-sb\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 
crc kubenswrapper[4669]: I1210 15:42:36.296835 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pksb\" (UniqueName: \"kubernetes.io/projected/65d25955-6686-4721-879f-5362213ba467-kube-api-access-9pksb\") pod \"dnsmasq-dns-578b8d767c-2629b\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.365381 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:36 crc kubenswrapper[4669]: W1210 15:42:36.806196 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65d25955_6686_4721_879f_5362213ba467.slice/crio-518d9609df9e01ce3e76bf0022cc10e2866cba9d60ab47b503902626bc893065 WatchSource:0}: Error finding container 518d9609df9e01ce3e76bf0022cc10e2866cba9d60ab47b503902626bc893065: Status 404 returned error can't find the container with id 518d9609df9e01ce3e76bf0022cc10e2866cba9d60ab47b503902626bc893065 Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.814735 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-2629b"] Dec 10 15:42:36 crc kubenswrapper[4669]: I1210 15:42:36.976475 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-2629b" event={"ID":"65d25955-6686-4721-879f-5362213ba467","Type":"ContainerStarted","Data":"518d9609df9e01ce3e76bf0022cc10e2866cba9d60ab47b503902626bc893065"} Dec 10 15:42:37 crc kubenswrapper[4669]: I1210 15:42:37.989061 4669 generic.go:334] "Generic (PLEG): container finished" podID="65d25955-6686-4721-879f-5362213ba467" containerID="7461cc6e124d44b2a9c8947872293540726b9614a941e3951de32aa56547e53c" exitCode=0 Dec 10 15:42:37 crc kubenswrapper[4669]: I1210 15:42:37.989104 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-2629b" event={"ID":"65d25955-6686-4721-879f-5362213ba467","Type":"ContainerDied","Data":"7461cc6e124d44b2a9c8947872293540726b9614a941e3951de32aa56547e53c"} Dec 10 15:42:39 crc kubenswrapper[4669]: I1210 15:42:39.000832 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-2629b" event={"ID":"65d25955-6686-4721-879f-5362213ba467","Type":"ContainerStarted","Data":"2a6d9185781a83bdf63ecde40df92aac3d28fe20a58a1ebf50493b97ac85d0b7"} Dec 10 15:42:39 crc kubenswrapper[4669]: I1210 15:42:39.001190 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:39 crc kubenswrapper[4669]: I1210 15:42:39.032151 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-578b8d767c-2629b" podStartSLOduration=3.032131228 podStartE2EDuration="3.032131228s" podCreationTimestamp="2025-12-10 15:42:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:42:39.020852689 +0000 UTC m=+1332.937799356" watchObservedRunningTime="2025-12-10 15:42:39.032131228 +0000 UTC m=+1332.949077865" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.367587 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.452071 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-68d4b6d797-b5wjd"] Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.452947 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" podUID="bfde7108-9097-4b59-b5a2-24bfa50f9884" containerName="dnsmasq-dns" containerID="cri-o://f502b646e18d906ab39dfad37d1f7a8641a37e67cebd79f00847c45a797d28a0" gracePeriod=10 Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.650726 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-hmjvp"] Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.652379 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.660333 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-hmjvp"] Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.783105 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-ovsdbserver-nb\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.783224 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thsb2\" (UniqueName: \"kubernetes.io/projected/d4ae1a29-b756-4385-ac2e-834b97397c4a-kube-api-access-thsb2\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.783253 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-dns-svc\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.783454 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-openstack-edpm-ipam\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.783673 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-config\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.783722 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-ovsdbserver-sb\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.888182 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-ovsdbserver-sb\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.888308 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-ovsdbserver-nb\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.888357 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thsb2\" (UniqueName: \"kubernetes.io/projected/d4ae1a29-b756-4385-ac2e-834b97397c4a-kube-api-access-thsb2\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.888376 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-dns-svc\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.888412 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-openstack-edpm-ipam\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.888456 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-config\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.889312 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-ovsdbserver-sb\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.889383 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-ovsdbserver-nb\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.901095 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-dns-svc\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.904238 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-openstack-edpm-ipam\") pod 
\"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.904346 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d4ae1a29-b756-4385-ac2e-834b97397c4a-config\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.934567 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thsb2\" (UniqueName: \"kubernetes.io/projected/d4ae1a29-b756-4385-ac2e-834b97397c4a-kube-api-access-thsb2\") pod \"dnsmasq-dns-667ff9c869-hmjvp\" (UID: \"d4ae1a29-b756-4385-ac2e-834b97397c4a\") " pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:46 crc kubenswrapper[4669]: I1210 15:42:46.974660 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.091149 4669 generic.go:334] "Generic (PLEG): container finished" podID="bfde7108-9097-4b59-b5a2-24bfa50f9884" containerID="f502b646e18d906ab39dfad37d1f7a8641a37e67cebd79f00847c45a797d28a0" exitCode=0 Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.091393 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" event={"ID":"bfde7108-9097-4b59-b5a2-24bfa50f9884","Type":"ContainerDied","Data":"f502b646e18d906ab39dfad37d1f7a8641a37e67cebd79f00847c45a797d28a0"} Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.091500 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" event={"ID":"bfde7108-9097-4b59-b5a2-24bfa50f9884","Type":"ContainerDied","Data":"cc9aa936844c2e8fbdf0fb03c421d7cb1f44cf85df3cb006a65b729676e8fe32"} Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.091574 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc9aa936844c2e8fbdf0fb03c421d7cb1f44cf85df3cb006a65b729676e8fe32" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.111031 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.204053 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-sb\") pod \"bfde7108-9097-4b59-b5a2-24bfa50f9884\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.204101 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-config\") pod \"bfde7108-9097-4b59-b5a2-24bfa50f9884\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.204208 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-nb\") pod \"bfde7108-9097-4b59-b5a2-24bfa50f9884\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.204259 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-dns-svc\") pod \"bfde7108-9097-4b59-b5a2-24bfa50f9884\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.204436 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wz57t\" (UniqueName: \"kubernetes.io/projected/bfde7108-9097-4b59-b5a2-24bfa50f9884-kube-api-access-wz57t\") pod \"bfde7108-9097-4b59-b5a2-24bfa50f9884\" (UID: \"bfde7108-9097-4b59-b5a2-24bfa50f9884\") " Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.258681 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bfde7108-9097-4b59-b5a2-24bfa50f9884-kube-api-access-wz57t" (OuterVolumeSpecName: "kube-api-access-wz57t") pod "bfde7108-9097-4b59-b5a2-24bfa50f9884" (UID: "bfde7108-9097-4b59-b5a2-24bfa50f9884"). InnerVolumeSpecName "kube-api-access-wz57t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.280872 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bfde7108-9097-4b59-b5a2-24bfa50f9884" (UID: "bfde7108-9097-4b59-b5a2-24bfa50f9884"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.312704 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wz57t\" (UniqueName: \"kubernetes.io/projected/bfde7108-9097-4b59-b5a2-24bfa50f9884-kube-api-access-wz57t\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.312738 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.319477 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bfde7108-9097-4b59-b5a2-24bfa50f9884" (UID: "bfde7108-9097-4b59-b5a2-24bfa50f9884"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.338478 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-config" (OuterVolumeSpecName: "config") pod "bfde7108-9097-4b59-b5a2-24bfa50f9884" (UID: "bfde7108-9097-4b59-b5a2-24bfa50f9884"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.365014 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bfde7108-9097-4b59-b5a2-24bfa50f9884" (UID: "bfde7108-9097-4b59-b5a2-24bfa50f9884"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.414412 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.415911 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.416042 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bfde7108-9097-4b59-b5a2-24bfa50f9884-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:47 crc kubenswrapper[4669]: I1210 15:42:47.559097 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-667ff9c869-hmjvp"] Dec 10 15:42:47 crc kubenswrapper[4669]: W1210 15:42:47.563890 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4ae1a29_b756_4385_ac2e_834b97397c4a.slice/crio-d11ca1b84cf757a6d4fccb44654ea50c41c007444da20ca4232a17a6f7f66b34 WatchSource:0}: Error finding container d11ca1b84cf757a6d4fccb44654ea50c41c007444da20ca4232a17a6f7f66b34: Status 404 returned error can't find the container with id d11ca1b84cf757a6d4fccb44654ea50c41c007444da20ca4232a17a6f7f66b34 Dec 10 15:42:48 crc kubenswrapper[4669]: I1210 15:42:48.100987 4669 generic.go:334] "Generic (PLEG): container finished" podID="d4ae1a29-b756-4385-ac2e-834b97397c4a" containerID="5ca1808d518b875c83ac4ef4672c2d5d91a2c0ea0e971f42884800f47083a4f9" exitCode=0 Dec 10 15:42:48 crc kubenswrapper[4669]: I1210 15:42:48.101048 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" event={"ID":"d4ae1a29-b756-4385-ac2e-834b97397c4a","Type":"ContainerDied","Data":"5ca1808d518b875c83ac4ef4672c2d5d91a2c0ea0e971f42884800f47083a4f9"} Dec 10 15:42:48 crc kubenswrapper[4669]: I1210 15:42:48.101079 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" event={"ID":"d4ae1a29-b756-4385-ac2e-834b97397c4a","Type":"ContainerStarted","Data":"d11ca1b84cf757a6d4fccb44654ea50c41c007444da20ca4232a17a6f7f66b34"} Dec 10 15:42:48 crc kubenswrapper[4669]: I1210 15:42:48.101127 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68d4b6d797-b5wjd" Dec 10 15:42:48 crc kubenswrapper[4669]: I1210 15:42:48.223294 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-b5wjd"] Dec 10 15:42:48 crc kubenswrapper[4669]: I1210 15:42:48.232108 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68d4b6d797-b5wjd"] Dec 10 15:42:48 crc kubenswrapper[4669]: I1210 15:42:48.407069 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bfde7108-9097-4b59-b5a2-24bfa50f9884" path="/var/lib/kubelet/pods/bfde7108-9097-4b59-b5a2-24bfa50f9884/volumes" Dec 10 15:42:49 crc kubenswrapper[4669]: I1210 15:42:49.115404 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" event={"ID":"d4ae1a29-b756-4385-ac2e-834b97397c4a","Type":"ContainerStarted","Data":"feeaf05407c7e07fb6b6d620546ccdd2cf6eff80e8f007172f1e8fa9cd92c9a5"} Dec 10 15:42:49 crc kubenswrapper[4669]: I1210 15:42:49.115587 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:49 crc kubenswrapper[4669]: I1210 15:42:49.154344 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" podStartSLOduration=3.154320749 podStartE2EDuration="3.154320749s" podCreationTimestamp="2025-12-10 15:42:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:42:49.144432044 +0000 UTC m=+1343.061378721" watchObservedRunningTime="2025-12-10 15:42:49.154320749 +0000 UTC m=+1343.071267376" Dec 10 15:42:56 crc kubenswrapper[4669]: I1210 15:42:56.977412 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-667ff9c869-hmjvp" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.070643 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-2629b"] Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.071351 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-578b8d767c-2629b" podUID="65d25955-6686-4721-879f-5362213ba467" containerName="dnsmasq-dns" containerID="cri-o://2a6d9185781a83bdf63ecde40df92aac3d28fe20a58a1ebf50493b97ac85d0b7" gracePeriod=10 Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.233795 4669 generic.go:334] "Generic (PLEG): container finished" podID="65d25955-6686-4721-879f-5362213ba467" containerID="2a6d9185781a83bdf63ecde40df92aac3d28fe20a58a1ebf50493b97ac85d0b7" exitCode=0 Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.234063 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-2629b" event={"ID":"65d25955-6686-4721-879f-5362213ba467","Type":"ContainerDied","Data":"2a6d9185781a83bdf63ecde40df92aac3d28fe20a58a1ebf50493b97ac85d0b7"} Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.527721 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.711040 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-openstack-edpm-ipam\") pod \"65d25955-6686-4721-879f-5362213ba467\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.711116 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-nb\") pod \"65d25955-6686-4721-879f-5362213ba467\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.711150 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-sb\") pod \"65d25955-6686-4721-879f-5362213ba467\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.711172 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-config\") pod \"65d25955-6686-4721-879f-5362213ba467\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.711206 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9pksb\" (UniqueName: \"kubernetes.io/projected/65d25955-6686-4721-879f-5362213ba467-kube-api-access-9pksb\") pod \"65d25955-6686-4721-879f-5362213ba467\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.711335 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-dns-svc\") pod \"65d25955-6686-4721-879f-5362213ba467\" (UID: \"65d25955-6686-4721-879f-5362213ba467\") " Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.727497 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65d25955-6686-4721-879f-5362213ba467-kube-api-access-9pksb" (OuterVolumeSpecName: "kube-api-access-9pksb") pod "65d25955-6686-4721-879f-5362213ba467" (UID: "65d25955-6686-4721-879f-5362213ba467"). InnerVolumeSpecName "kube-api-access-9pksb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.764573 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-config" (OuterVolumeSpecName: "config") pod "65d25955-6686-4721-879f-5362213ba467" (UID: "65d25955-6686-4721-879f-5362213ba467"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.766305 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "65d25955-6686-4721-879f-5362213ba467" (UID: "65d25955-6686-4721-879f-5362213ba467"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.772364 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "65d25955-6686-4721-879f-5362213ba467" (UID: "65d25955-6686-4721-879f-5362213ba467"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.774480 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "65d25955-6686-4721-879f-5362213ba467" (UID: "65d25955-6686-4721-879f-5362213ba467"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.779210 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "65d25955-6686-4721-879f-5362213ba467" (UID: "65d25955-6686-4721-879f-5362213ba467"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.813379 4669 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.813404 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.813413 4669 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.813421 4669 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-config\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.813432 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9pksb\" (UniqueName: \"kubernetes.io/projected/65d25955-6686-4721-879f-5362213ba467-kube-api-access-9pksb\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:57 crc kubenswrapper[4669]: I1210 15:42:57.813441 4669 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/65d25955-6686-4721-879f-5362213ba467-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 10 15:42:58 crc kubenswrapper[4669]: I1210 15:42:58.245281 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-578b8d767c-2629b" event={"ID":"65d25955-6686-4721-879f-5362213ba467","Type":"ContainerDied","Data":"518d9609df9e01ce3e76bf0022cc10e2866cba9d60ab47b503902626bc893065"} Dec 10 15:42:58 crc kubenswrapper[4669]: I1210 15:42:58.245332 4669 scope.go:117] "RemoveContainer" containerID="2a6d9185781a83bdf63ecde40df92aac3d28fe20a58a1ebf50493b97ac85d0b7" Dec 10 15:42:58 crc kubenswrapper[4669]: I1210 15:42:58.245336 
4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-578b8d767c-2629b" Dec 10 15:42:58 crc kubenswrapper[4669]: I1210 15:42:58.279158 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-2629b"] Dec 10 15:42:58 crc kubenswrapper[4669]: I1210 15:42:58.284951 4669 scope.go:117] "RemoveContainer" containerID="7461cc6e124d44b2a9c8947872293540726b9614a941e3951de32aa56547e53c" Dec 10 15:42:58 crc kubenswrapper[4669]: I1210 15:42:58.287853 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-578b8d767c-2629b"] Dec 10 15:42:58 crc kubenswrapper[4669]: I1210 15:42:58.408385 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65d25955-6686-4721-879f-5362213ba467" path="/var/lib/kubelet/pods/65d25955-6686-4721-879f-5362213ba467/volumes" Dec 10 15:43:05 crc kubenswrapper[4669]: I1210 15:43:05.312425 4669 generic.go:334] "Generic (PLEG): container finished" podID="61575229-df2e-466a-858a-02d9fa0c1e79" containerID="2ae0cff9d3545f11dee584b2263782ac14b757814744b68630466d9b1e1848fd" exitCode=0 Dec 10 15:43:05 crc kubenswrapper[4669]: I1210 15:43:05.312518 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"61575229-df2e-466a-858a-02d9fa0c1e79","Type":"ContainerDied","Data":"2ae0cff9d3545f11dee584b2263782ac14b757814744b68630466d9b1e1848fd"} Dec 10 15:43:06 crc kubenswrapper[4669]: I1210 15:43:06.327273 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"61575229-df2e-466a-858a-02d9fa0c1e79","Type":"ContainerStarted","Data":"0a5ef7a770c7da4fc89b9f1fbd219b9f280b940ae5a7fe437acf8c6dbc443cef"} Dec 10 15:43:06 crc kubenswrapper[4669]: I1210 15:43:06.329368 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 10 15:43:06 crc kubenswrapper[4669]: I1210 15:43:06.363495 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.363468268 podStartE2EDuration="36.363468268s" podCreationTimestamp="2025-12-10 15:42:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:43:06.350503989 +0000 UTC m=+1360.267450616" watchObservedRunningTime="2025-12-10 15:43:06.363468268 +0000 UTC m=+1360.280414915" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.366908 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2"] Dec 10 15:43:07 crc kubenswrapper[4669]: E1210 15:43:07.367599 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d25955-6686-4721-879f-5362213ba467" containerName="init" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.367613 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d25955-6686-4721-879f-5362213ba467" containerName="init" Dec 10 15:43:07 crc kubenswrapper[4669]: E1210 15:43:07.367631 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bfde7108-9097-4b59-b5a2-24bfa50f9884" containerName="init" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.367637 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfde7108-9097-4b59-b5a2-24bfa50f9884" containerName="init" Dec 10 15:43:07 crc kubenswrapper[4669]: E1210 15:43:07.367646 4669 cpu_manager.go:410] "RemoveStaleState: removing container" 
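[editor's note] "Cleaned up orphaned pod volumes dir" above is the last step of pod removal: once every volume under /var/lib/kubelet/pods/<uid>/volumes is unmounted and gone, the directory itself is removed. A deliberately naive sketch; the kubelet's real check walks the plugin subdirectories rather than requiring a flat empty dir:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// Toy orphan cleanup: refuse to remove the per-pod volumes dir while
// anything is still inside it.
func cleanupOrphan(uid string) error {
	dir := filepath.Join("/var/lib/kubelet/pods", uid, "volumes")
	entries, err := os.ReadDir(dir)
	if err != nil {
		return err
	}
	if len(entries) > 0 {
		return fmt.Errorf("%s not empty; still tearing down", dir)
	}
	return os.Remove(dir)
}

func main() {
	fmt.Println(cleanupOrphan("bfde7108-9097-4b59-b5a2-24bfa50f9884"))
}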
podUID="bfde7108-9097-4b59-b5a2-24bfa50f9884" containerName="dnsmasq-dns" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.367654 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="bfde7108-9097-4b59-b5a2-24bfa50f9884" containerName="dnsmasq-dns" Dec 10 15:43:07 crc kubenswrapper[4669]: E1210 15:43:07.367682 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65d25955-6686-4721-879f-5362213ba467" containerName="dnsmasq-dns" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.367688 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="65d25955-6686-4721-879f-5362213ba467" containerName="dnsmasq-dns" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.367853 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="bfde7108-9097-4b59-b5a2-24bfa50f9884" containerName="dnsmasq-dns" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.367865 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="65d25955-6686-4721-879f-5362213ba467" containerName="dnsmasq-dns" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.368613 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.370241 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.370964 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.370971 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.375608 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.388128 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2"] Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.470754 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.470804 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.470826 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc 
kubenswrapper[4669]: I1210 15:43:07.470977 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcjn8\" (UniqueName: \"kubernetes.io/projected/a9ab686a-606a-4af1-83e6-42db47c6c650-kube-api-access-zcjn8\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.572487 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcjn8\" (UniqueName: \"kubernetes.io/projected/a9ab686a-606a-4af1-83e6-42db47c6c650-kube-api-access-zcjn8\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.572618 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.572649 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.572678 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.579067 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.579656 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.583764 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 
15:43:07.593607 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcjn8\" (UniqueName: \"kubernetes.io/projected/a9ab686a-606a-4af1-83e6-42db47c6c650-kube-api-access-zcjn8\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:07 crc kubenswrapper[4669]: I1210 15:43:07.689018 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:08 crc kubenswrapper[4669]: I1210 15:43:08.300028 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2"] Dec 10 15:43:08 crc kubenswrapper[4669]: I1210 15:43:08.363464 4669 generic.go:334] "Generic (PLEG): container finished" podID="fe16c440-f893-46cb-b038-536568c85b5b" containerID="22e615fe22fb1ac570f27d471d16171cf8ea2327ad08919b712536466e4450bc" exitCode=0 Dec 10 15:43:08 crc kubenswrapper[4669]: I1210 15:43:08.363511 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"fe16c440-f893-46cb-b038-536568c85b5b","Type":"ContainerDied","Data":"22e615fe22fb1ac570f27d471d16171cf8ea2327ad08919b712536466e4450bc"} Dec 10 15:43:09 crc kubenswrapper[4669]: I1210 15:43:09.380106 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"fe16c440-f893-46cb-b038-536568c85b5b","Type":"ContainerStarted","Data":"b968d2fb4892166a6754ffde5c72aafe12392aed22ee7ea1c4dbb23d13d84fbb"} Dec 10 15:43:09 crc kubenswrapper[4669]: I1210 15:43:09.380668 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:43:09 crc kubenswrapper[4669]: I1210 15:43:09.381747 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" event={"ID":"a9ab686a-606a-4af1-83e6-42db47c6c650","Type":"ContainerStarted","Data":"cbf1776193dd20ef24f159f68eb7eba201b9c039d746d2cee869488ecfaa9595"} Dec 10 15:43:09 crc kubenswrapper[4669]: I1210 15:43:09.418140 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.418119988 podStartE2EDuration="36.418119988s" podCreationTimestamp="2025-12-10 15:42:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 15:43:09.406071461 +0000 UTC m=+1363.323018088" watchObservedRunningTime="2025-12-10 15:43:09.418119988 +0000 UTC m=+1363.335066615" Dec 10 15:43:20 crc kubenswrapper[4669]: I1210 15:43:20.575458 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 10 15:43:21 crc kubenswrapper[4669]: I1210 15:43:21.532541 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" event={"ID":"a9ab686a-606a-4af1-83e6-42db47c6c650","Type":"ContainerStarted","Data":"36ffdaa29de5c258d9136f3acd218c1b1938753e7c35f77bfff2495d417ddbfb"} Dec 10 15:43:21 crc kubenswrapper[4669]: I1210 15:43:21.568414 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" podStartSLOduration=1.857040851 podStartE2EDuration="14.568394502s" 
podCreationTimestamp="2025-12-10 15:43:07 +0000 UTC" firstStartedPulling="2025-12-10 15:43:08.348295994 +0000 UTC m=+1362.265242611" lastFinishedPulling="2025-12-10 15:43:21.059649635 +0000 UTC m=+1374.976596262" observedRunningTime="2025-12-10 15:43:21.565717286 +0000 UTC m=+1375.482663913" watchObservedRunningTime="2025-12-10 15:43:21.568394502 +0000 UTC m=+1375.485341129" Dec 10 15:43:23 crc kubenswrapper[4669]: I1210 15:43:23.410617 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 10 15:43:28 crc kubenswrapper[4669]: I1210 15:43:28.745251 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:43:28 crc kubenswrapper[4669]: I1210 15:43:28.745782 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:43:34 crc kubenswrapper[4669]: I1210 15:43:34.670933 4669 generic.go:334] "Generic (PLEG): container finished" podID="a9ab686a-606a-4af1-83e6-42db47c6c650" containerID="36ffdaa29de5c258d9136f3acd218c1b1938753e7c35f77bfff2495d417ddbfb" exitCode=0 Dec 10 15:43:34 crc kubenswrapper[4669]: I1210 15:43:34.671015 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" event={"ID":"a9ab686a-606a-4af1-83e6-42db47c6c650","Type":"ContainerDied","Data":"36ffdaa29de5c258d9136f3acd218c1b1938753e7c35f77bfff2495d417ddbfb"} Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.134858 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.272902 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-ssh-key\") pod \"a9ab686a-606a-4af1-83e6-42db47c6c650\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.273094 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-repo-setup-combined-ca-bundle\") pod \"a9ab686a-606a-4af1-83e6-42db47c6c650\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.273208 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-inventory\") pod \"a9ab686a-606a-4af1-83e6-42db47c6c650\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.273619 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcjn8\" (UniqueName: \"kubernetes.io/projected/a9ab686a-606a-4af1-83e6-42db47c6c650-kube-api-access-zcjn8\") pod \"a9ab686a-606a-4af1-83e6-42db47c6c650\" (UID: \"a9ab686a-606a-4af1-83e6-42db47c6c650\") " Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.279090 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9ab686a-606a-4af1-83e6-42db47c6c650-kube-api-access-zcjn8" (OuterVolumeSpecName: "kube-api-access-zcjn8") pod "a9ab686a-606a-4af1-83e6-42db47c6c650" (UID: "a9ab686a-606a-4af1-83e6-42db47c6c650"). InnerVolumeSpecName "kube-api-access-zcjn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.280838 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "a9ab686a-606a-4af1-83e6-42db47c6c650" (UID: "a9ab686a-606a-4af1-83e6-42db47c6c650"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.306360 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a9ab686a-606a-4af1-83e6-42db47c6c650" (UID: "a9ab686a-606a-4af1-83e6-42db47c6c650"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.311597 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-inventory" (OuterVolumeSpecName: "inventory") pod "a9ab686a-606a-4af1-83e6-42db47c6c650" (UID: "a9ab686a-606a-4af1-83e6-42db47c6c650"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.376393 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcjn8\" (UniqueName: \"kubernetes.io/projected/a9ab686a-606a-4af1-83e6-42db47c6c650-kube-api-access-zcjn8\") on node \"crc\" DevicePath \"\"" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.376705 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.376788 4669 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.376873 4669 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a9ab686a-606a-4af1-83e6-42db47c6c650-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.694720 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" event={"ID":"a9ab686a-606a-4af1-83e6-42db47c6c650","Type":"ContainerDied","Data":"cbf1776193dd20ef24f159f68eb7eba201b9c039d746d2cee869488ecfaa9595"} Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.694774 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbf1776193dd20ef24f159f68eb7eba201b9c039d746d2cee869488ecfaa9595" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.694837 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.788983 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns"] Dec 10 15:43:36 crc kubenswrapper[4669]: E1210 15:43:36.789764 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ab686a-606a-4af1-83e6-42db47c6c650" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.789791 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ab686a-606a-4af1-83e6-42db47c6c650" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.790045 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9ab686a-606a-4af1-83e6-42db47c6c650" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.796111 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.800372 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.800692 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.800800 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.800719 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.819786 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns"] Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.887884 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7876\" (UniqueName: \"kubernetes.io/projected/2919610d-6d25-4181-a177-04920d27ee8d-kube-api-access-t7876\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.888139 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.888340 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.888466 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.991179 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7876\" (UniqueName: \"kubernetes.io/projected/2919610d-6d25-4181-a177-04920d27ee8d-kube-api-access-t7876\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.991248 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-inventory\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.991283 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.991313 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.995371 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.997668 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:36 crc kubenswrapper[4669]: I1210 15:43:36.998269 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:37 crc kubenswrapper[4669]: I1210 15:43:37.012530 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7876\" (UniqueName: \"kubernetes.io/projected/2919610d-6d25-4181-a177-04920d27ee8d-kube-api-access-t7876\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-2csns\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:37 crc kubenswrapper[4669]: I1210 15:43:37.117455 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:43:37 crc kubenswrapper[4669]: I1210 15:43:37.688371 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns"] Dec 10 15:43:37 crc kubenswrapper[4669]: I1210 15:43:37.705365 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" event={"ID":"2919610d-6d25-4181-a177-04920d27ee8d","Type":"ContainerStarted","Data":"aa17824b9063f3744b1ba9afb6046a0efa70d09340fcb7debc1d464be2050170"} Dec 10 15:43:38 crc kubenswrapper[4669]: I1210 15:43:38.718398 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" event={"ID":"2919610d-6d25-4181-a177-04920d27ee8d","Type":"ContainerStarted","Data":"559e4b97a9ce8c2bc22a1520735d244cb75a5f8347355471a3225452adb5c085"} Dec 10 15:43:38 crc kubenswrapper[4669]: I1210 15:43:38.763480 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" podStartSLOduration=2.340963507 podStartE2EDuration="2.763451741s" podCreationTimestamp="2025-12-10 15:43:36 +0000 UTC" firstStartedPulling="2025-12-10 15:43:37.681763005 +0000 UTC m=+1391.598709642" lastFinishedPulling="2025-12-10 15:43:38.104251249 +0000 UTC m=+1392.021197876" observedRunningTime="2025-12-10 15:43:38.741568662 +0000 UTC m=+1392.658515299" watchObservedRunningTime="2025-12-10 15:43:38.763451741 +0000 UTC m=+1392.680398368" Dec 10 15:43:58 crc kubenswrapper[4669]: I1210 15:43:58.744797 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:43:58 crc kubenswrapper[4669]: I1210 15:43:58.745262 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:44:28 crc kubenswrapper[4669]: I1210 15:44:28.744770 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:44:28 crc kubenswrapper[4669]: I1210 15:44:28.745490 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:44:28 crc kubenswrapper[4669]: I1210 15:44:28.745539 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:44:28 crc kubenswrapper[4669]: I1210 15:44:28.746422 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"7c083e375e78bcc55b89081b1b91303b8145d8a9d38c789b9d9b1d750a62bcfb"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:44:28 crc kubenswrapper[4669]: I1210 15:44:28.746485 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://7c083e375e78bcc55b89081b1b91303b8145d8a9d38c789b9d9b1d750a62bcfb" gracePeriod=600 Dec 10 15:44:29 crc kubenswrapper[4669]: I1210 15:44:29.212286 4669 generic.go:334] "Generic (PLEG): container finished" podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="7c083e375e78bcc55b89081b1b91303b8145d8a9d38c789b9d9b1d750a62bcfb" exitCode=0 Dec 10 15:44:29 crc kubenswrapper[4669]: I1210 15:44:29.212336 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"7c083e375e78bcc55b89081b1b91303b8145d8a9d38c789b9d9b1d750a62bcfb"} Dec 10 15:44:29 crc kubenswrapper[4669]: I1210 15:44:29.212366 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"} Dec 10 15:44:29 crc kubenswrapper[4669]: I1210 15:44:29.212385 4669 scope.go:117] "RemoveContainer" containerID="706b5365ceb6404033f138d584d9ab3c0d60c4c6dec40f2cfffaa838889f4944" Dec 10 15:44:29 crc kubenswrapper[4669]: I1210 15:44:29.538606 4669 scope.go:117] "RemoveContainer" containerID="e138e24d0576bed82ef59cec3da33bc58610b3784ab3ebf06245daf302b1ff0d" Dec 10 15:44:29 crc kubenswrapper[4669]: I1210 15:44:29.568004 4669 scope.go:117] "RemoveContainer" containerID="5aace8ea46b46968de06d3d4d40e3c80a71c4a88d9aa87a10a380ec4ee021f02" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.150942 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8"] Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.153011 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.156471 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.157555 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.165991 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8"] Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.285798 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/035a2457-15eb-400d-9c5c-0059254c2e45-secret-volume\") pod \"collect-profiles-29423025-chvw8\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.285837 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/035a2457-15eb-400d-9c5c-0059254c2e45-config-volume\") pod \"collect-profiles-29423025-chvw8\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.285871 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zkgn\" (UniqueName: \"kubernetes.io/projected/035a2457-15eb-400d-9c5c-0059254c2e45-kube-api-access-4zkgn\") pod \"collect-profiles-29423025-chvw8\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.388194 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/035a2457-15eb-400d-9c5c-0059254c2e45-secret-volume\") pod \"collect-profiles-29423025-chvw8\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.389370 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/035a2457-15eb-400d-9c5c-0059254c2e45-config-volume\") pod \"collect-profiles-29423025-chvw8\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.389428 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zkgn\" (UniqueName: \"kubernetes.io/projected/035a2457-15eb-400d-9c5c-0059254c2e45-kube-api-access-4zkgn\") pod \"collect-profiles-29423025-chvw8\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.390311 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/035a2457-15eb-400d-9c5c-0059254c2e45-config-volume\") pod 
\"collect-profiles-29423025-chvw8\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.394894 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/035a2457-15eb-400d-9c5c-0059254c2e45-secret-volume\") pod \"collect-profiles-29423025-chvw8\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.408488 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zkgn\" (UniqueName: \"kubernetes.io/projected/035a2457-15eb-400d-9c5c-0059254c2e45-kube-api-access-4zkgn\") pod \"collect-profiles-29423025-chvw8\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.491033 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:00 crc kubenswrapper[4669]: I1210 15:45:00.953765 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8"] Dec 10 15:45:01 crc kubenswrapper[4669]: I1210 15:45:01.550698 4669 generic.go:334] "Generic (PLEG): container finished" podID="035a2457-15eb-400d-9c5c-0059254c2e45" containerID="9b2e52848bf03383397a3d90018a308e6af09876ee785f667e0288673e118533" exitCode=0 Dec 10 15:45:01 crc kubenswrapper[4669]: I1210 15:45:01.550772 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" event={"ID":"035a2457-15eb-400d-9c5c-0059254c2e45","Type":"ContainerDied","Data":"9b2e52848bf03383397a3d90018a308e6af09876ee785f667e0288673e118533"} Dec 10 15:45:01 crc kubenswrapper[4669]: I1210 15:45:01.551953 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" event={"ID":"035a2457-15eb-400d-9c5c-0059254c2e45","Type":"ContainerStarted","Data":"e3717ad7a320f57ca54164464be170390c4ee47897b3977c404cef9888c37bb6"} Dec 10 15:45:02 crc kubenswrapper[4669]: I1210 15:45:02.868674 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.038755 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/035a2457-15eb-400d-9c5c-0059254c2e45-secret-volume\") pod \"035a2457-15eb-400d-9c5c-0059254c2e45\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.038842 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zkgn\" (UniqueName: \"kubernetes.io/projected/035a2457-15eb-400d-9c5c-0059254c2e45-kube-api-access-4zkgn\") pod \"035a2457-15eb-400d-9c5c-0059254c2e45\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.038959 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/035a2457-15eb-400d-9c5c-0059254c2e45-config-volume\") pod \"035a2457-15eb-400d-9c5c-0059254c2e45\" (UID: \"035a2457-15eb-400d-9c5c-0059254c2e45\") " Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.040253 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/035a2457-15eb-400d-9c5c-0059254c2e45-config-volume" (OuterVolumeSpecName: "config-volume") pod "035a2457-15eb-400d-9c5c-0059254c2e45" (UID: "035a2457-15eb-400d-9c5c-0059254c2e45"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.045097 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/035a2457-15eb-400d-9c5c-0059254c2e45-kube-api-access-4zkgn" (OuterVolumeSpecName: "kube-api-access-4zkgn") pod "035a2457-15eb-400d-9c5c-0059254c2e45" (UID: "035a2457-15eb-400d-9c5c-0059254c2e45"). InnerVolumeSpecName "kube-api-access-4zkgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.047374 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/035a2457-15eb-400d-9c5c-0059254c2e45-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "035a2457-15eb-400d-9c5c-0059254c2e45" (UID: "035a2457-15eb-400d-9c5c-0059254c2e45"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.141486 4669 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/035a2457-15eb-400d-9c5c-0059254c2e45-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.141566 4669 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/035a2457-15eb-400d-9c5c-0059254c2e45-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.141589 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zkgn\" (UniqueName: \"kubernetes.io/projected/035a2457-15eb-400d-9c5c-0059254c2e45-kube-api-access-4zkgn\") on node \"crc\" DevicePath \"\"" Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.569713 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" event={"ID":"035a2457-15eb-400d-9c5c-0059254c2e45","Type":"ContainerDied","Data":"e3717ad7a320f57ca54164464be170390c4ee47897b3977c404cef9888c37bb6"} Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.570066 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3717ad7a320f57ca54164464be170390c4ee47897b3977c404cef9888c37bb6" Dec 10 15:45:03 crc kubenswrapper[4669]: I1210 15:45:03.569815 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423025-chvw8" Dec 10 15:45:29 crc kubenswrapper[4669]: I1210 15:45:29.662944 4669 scope.go:117] "RemoveContainer" containerID="f5ceb0ad15facabb73d1b75502e5e56dd776dac5e4d377c388824af406d313a8" Dec 10 15:45:29 crc kubenswrapper[4669]: I1210 15:45:29.690560 4669 scope.go:117] "RemoveContainer" containerID="02d94dba0bb873fa665594bca91a6fb63cff5588f54aa9eb9a55dce1eefaf15f" Dec 10 15:45:29 crc kubenswrapper[4669]: I1210 15:45:29.713331 4669 scope.go:117] "RemoveContainer" containerID="fa8a7edd13630b037e953e6f5c3a595f97923fdcb0d97d297f4d80184d8c457a" Dec 10 15:45:29 crc kubenswrapper[4669]: I1210 15:45:29.754428 4669 scope.go:117] "RemoveContainer" containerID="e7d57de32893d4ecce08d0ac516362edf5af917acefe998111c61e6ea428b698" Dec 10 15:45:29 crc kubenswrapper[4669]: I1210 15:45:29.810263 4669 scope.go:117] "RemoveContainer" containerID="057bf2460e35c72968457185a63bd7d3661ee4f10113628c9f96dd44c4ddb367" Dec 10 15:45:29 crc kubenswrapper[4669]: I1210 15:45:29.848095 4669 scope.go:117] "RemoveContainer" containerID="ee9e17c9f85d4e6a1489ac6c3f0c07944b27c4b5069df6f27d3f4355083ec4da" Dec 10 15:45:29 crc kubenswrapper[4669]: I1210 15:45:29.881619 4669 scope.go:117] "RemoveContainer" containerID="84ec240b32c770e55ae43e4010374b2f77bcbfe82efad9e5d24fe96404ae9d27" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.132187 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wkv2n"] Dec 10 15:46:09 crc kubenswrapper[4669]: E1210 15:46:09.133598 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="035a2457-15eb-400d-9c5c-0059254c2e45" containerName="collect-profiles" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.133630 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="035a2457-15eb-400d-9c5c-0059254c2e45" containerName="collect-profiles" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.133944 4669 
memory_manager.go:354] "RemoveStaleState removing state" podUID="035a2457-15eb-400d-9c5c-0059254c2e45" containerName="collect-profiles" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.136168 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.143775 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wkv2n"] Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.232532 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7tm79\" (UniqueName: \"kubernetes.io/projected/6ec8e90f-01c1-4212-80b4-5c781d01fd84-kube-api-access-7tm79\") pod \"certified-operators-wkv2n\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.232804 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-catalog-content\") pod \"certified-operators-wkv2n\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.233004 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-utilities\") pod \"certified-operators-wkv2n\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.334631 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-catalog-content\") pod \"certified-operators-wkv2n\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.334735 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-utilities\") pod \"certified-operators-wkv2n\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.334848 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7tm79\" (UniqueName: \"kubernetes.io/projected/6ec8e90f-01c1-4212-80b4-5c781d01fd84-kube-api-access-7tm79\") pod \"certified-operators-wkv2n\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.335354 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-catalog-content\") pod \"certified-operators-wkv2n\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.335484 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-utilities\") pod \"certified-operators-wkv2n\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.353073 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7tm79\" (UniqueName: \"kubernetes.io/projected/6ec8e90f-01c1-4212-80b4-5c781d01fd84-kube-api-access-7tm79\") pod \"certified-operators-wkv2n\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:09 crc kubenswrapper[4669]: I1210 15:46:09.484181 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:10 crc kubenswrapper[4669]: I1210 15:46:10.005650 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wkv2n"] Dec 10 15:46:10 crc kubenswrapper[4669]: I1210 15:46:10.222696 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkv2n" event={"ID":"6ec8e90f-01c1-4212-80b4-5c781d01fd84","Type":"ContainerStarted","Data":"4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460"} Dec 10 15:46:10 crc kubenswrapper[4669]: I1210 15:46:10.222747 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkv2n" event={"ID":"6ec8e90f-01c1-4212-80b4-5c781d01fd84","Type":"ContainerStarted","Data":"9535bd19683996542b08f59ae8f03766fbfc42e960becf93d15bca337cfdcae5"} Dec 10 15:46:10 crc kubenswrapper[4669]: I1210 15:46:10.224426 4669 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 15:46:11 crc kubenswrapper[4669]: I1210 15:46:11.250238 4669 generic.go:334] "Generic (PLEG): container finished" podID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerID="4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460" exitCode=0 Dec 10 15:46:11 crc kubenswrapper[4669]: I1210 15:46:11.250372 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkv2n" event={"ID":"6ec8e90f-01c1-4212-80b4-5c781d01fd84","Type":"ContainerDied","Data":"4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460"} Dec 10 15:46:12 crc kubenswrapper[4669]: I1210 15:46:12.261481 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkv2n" event={"ID":"6ec8e90f-01c1-4212-80b4-5c781d01fd84","Type":"ContainerStarted","Data":"0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408"} Dec 10 15:46:13 crc kubenswrapper[4669]: I1210 15:46:13.274518 4669 generic.go:334] "Generic (PLEG): container finished" podID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerID="0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408" exitCode=0 Dec 10 15:46:13 crc kubenswrapper[4669]: I1210 15:46:13.274590 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkv2n" event={"ID":"6ec8e90f-01c1-4212-80b4-5c781d01fd84","Type":"ContainerDied","Data":"0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408"} Dec 10 15:46:15 crc kubenswrapper[4669]: I1210 15:46:15.301572 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkv2n" 
event={"ID":"6ec8e90f-01c1-4212-80b4-5c781d01fd84","Type":"ContainerStarted","Data":"91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be"} Dec 10 15:46:15 crc kubenswrapper[4669]: I1210 15:46:15.327658 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wkv2n" podStartSLOduration=1.604697947 podStartE2EDuration="6.327638024s" podCreationTimestamp="2025-12-10 15:46:09 +0000 UTC" firstStartedPulling="2025-12-10 15:46:10.224162175 +0000 UTC m=+1544.141108802" lastFinishedPulling="2025-12-10 15:46:14.947102242 +0000 UTC m=+1548.864048879" observedRunningTime="2025-12-10 15:46:15.324596531 +0000 UTC m=+1549.241543168" watchObservedRunningTime="2025-12-10 15:46:15.327638024 +0000 UTC m=+1549.244584661" Dec 10 15:46:19 crc kubenswrapper[4669]: I1210 15:46:19.484835 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:19 crc kubenswrapper[4669]: I1210 15:46:19.485602 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:19 crc kubenswrapper[4669]: I1210 15:46:19.575903 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:20 crc kubenswrapper[4669]: I1210 15:46:20.442324 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:20 crc kubenswrapper[4669]: I1210 15:46:20.505454 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wkv2n"] Dec 10 15:46:22 crc kubenswrapper[4669]: I1210 15:46:22.375709 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wkv2n" podUID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerName="registry-server" containerID="cri-o://91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be" gracePeriod=2 Dec 10 15:46:22 crc kubenswrapper[4669]: I1210 15:46:22.811304 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:22 crc kubenswrapper[4669]: I1210 15:46:22.939703 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7tm79\" (UniqueName: \"kubernetes.io/projected/6ec8e90f-01c1-4212-80b4-5c781d01fd84-kube-api-access-7tm79\") pod \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " Dec 10 15:46:22 crc kubenswrapper[4669]: I1210 15:46:22.939841 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-catalog-content\") pod \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " Dec 10 15:46:22 crc kubenswrapper[4669]: I1210 15:46:22.940005 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-utilities\") pod \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\" (UID: \"6ec8e90f-01c1-4212-80b4-5c781d01fd84\") " Dec 10 15:46:22 crc kubenswrapper[4669]: I1210 15:46:22.941129 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-utilities" (OuterVolumeSpecName: "utilities") pod "6ec8e90f-01c1-4212-80b4-5c781d01fd84" (UID: "6ec8e90f-01c1-4212-80b4-5c781d01fd84"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:46:22 crc kubenswrapper[4669]: I1210 15:46:22.945157 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ec8e90f-01c1-4212-80b4-5c781d01fd84-kube-api-access-7tm79" (OuterVolumeSpecName: "kube-api-access-7tm79") pod "6ec8e90f-01c1-4212-80b4-5c781d01fd84" (UID: "6ec8e90f-01c1-4212-80b4-5c781d01fd84"). InnerVolumeSpecName "kube-api-access-7tm79". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.019532 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ec8e90f-01c1-4212-80b4-5c781d01fd84" (UID: "6ec8e90f-01c1-4212-80b4-5c781d01fd84"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.041890 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.042586 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7tm79\" (UniqueName: \"kubernetes.io/projected/6ec8e90f-01c1-4212-80b4-5c781d01fd84-kube-api-access-7tm79\") on node \"crc\" DevicePath \"\"" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.042612 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ec8e90f-01c1-4212-80b4-5c781d01fd84-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.401088 4669 generic.go:334] "Generic (PLEG): container finished" podID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerID="91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be" exitCode=0 Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.401149 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wkv2n" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.401146 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkv2n" event={"ID":"6ec8e90f-01c1-4212-80b4-5c781d01fd84","Type":"ContainerDied","Data":"91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be"} Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.401649 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wkv2n" event={"ID":"6ec8e90f-01c1-4212-80b4-5c781d01fd84","Type":"ContainerDied","Data":"9535bd19683996542b08f59ae8f03766fbfc42e960becf93d15bca337cfdcae5"} Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.401681 4669 scope.go:117] "RemoveContainer" containerID="91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.450078 4669 scope.go:117] "RemoveContainer" containerID="0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.466085 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wkv2n"] Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.491864 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wkv2n"] Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.496558 4669 scope.go:117] "RemoveContainer" containerID="4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.546391 4669 scope.go:117] "RemoveContainer" containerID="91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be" Dec 10 15:46:23 crc kubenswrapper[4669]: E1210 15:46:23.547193 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be\": container with ID starting with 91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be not found: ID does not exist" containerID="91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.547249 
4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be"} err="failed to get container status \"91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be\": rpc error: code = NotFound desc = could not find container \"91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be\": container with ID starting with 91b43e3bec4633eb7d0468c79b9a587c66753289296539b8bd5c3bd499a5a6be not found: ID does not exist" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.547278 4669 scope.go:117] "RemoveContainer" containerID="0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408" Dec 10 15:46:23 crc kubenswrapper[4669]: E1210 15:46:23.547719 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408\": container with ID starting with 0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408 not found: ID does not exist" containerID="0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.547755 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408"} err="failed to get container status \"0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408\": rpc error: code = NotFound desc = could not find container \"0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408\": container with ID starting with 0ff48e2054e7b9ae98a8f1fc44674c913f8c15cf339a02c15b43e6dde2ca6408 not found: ID does not exist" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.547775 4669 scope.go:117] "RemoveContainer" containerID="4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460" Dec 10 15:46:23 crc kubenswrapper[4669]: E1210 15:46:23.548203 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460\": container with ID starting with 4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460 not found: ID does not exist" containerID="4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460" Dec 10 15:46:23 crc kubenswrapper[4669]: I1210 15:46:23.548257 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460"} err="failed to get container status \"4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460\": rpc error: code = NotFound desc = could not find container \"4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460\": container with ID starting with 4a15f3a49fe7ac16290d9b8312b72e9f60c8f8f102a6292ffd60732453bf4460 not found: ID does not exist" Dec 10 15:46:24 crc kubenswrapper[4669]: I1210 15:46:24.408952 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" path="/var/lib/kubelet/pods/6ec8e90f-01c1-4212-80b4-5c781d01fd84/volumes" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.377048 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bhgwg"] Dec 10 15:46:39 crc kubenswrapper[4669]: E1210 15:46:39.378604 4669 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerName="registry-server" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.378624 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerName="registry-server" Dec 10 15:46:39 crc kubenswrapper[4669]: E1210 15:46:39.379085 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerName="extract-utilities" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.379127 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerName="extract-utilities" Dec 10 15:46:39 crc kubenswrapper[4669]: E1210 15:46:39.379163 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerName="extract-content" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.379174 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerName="extract-content" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.380499 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ec8e90f-01c1-4212-80b4-5c781d01fd84" containerName="registry-server" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.382646 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.396537 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bhgwg"] Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.446299 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98f42\" (UniqueName: \"kubernetes.io/projected/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-kube-api-access-98f42\") pod \"community-operators-bhgwg\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.446388 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-utilities\") pod \"community-operators-bhgwg\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.446539 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-catalog-content\") pod \"community-operators-bhgwg\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.548670 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-catalog-content\") pod \"community-operators-bhgwg\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.548873 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98f42\" (UniqueName: \"kubernetes.io/projected/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-kube-api-access-98f42\") 
pod \"community-operators-bhgwg\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.548946 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-utilities\") pod \"community-operators-bhgwg\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.549771 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-utilities\") pod \"community-operators-bhgwg\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.551764 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-catalog-content\") pod \"community-operators-bhgwg\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.578349 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98f42\" (UniqueName: \"kubernetes.io/projected/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-kube-api-access-98f42\") pod \"community-operators-bhgwg\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:39 crc kubenswrapper[4669]: I1210 15:46:39.702714 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:40 crc kubenswrapper[4669]: I1210 15:46:40.259162 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bhgwg"] Dec 10 15:46:40 crc kubenswrapper[4669]: I1210 15:46:40.576207 4669 generic.go:334] "Generic (PLEG): container finished" podID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerID="a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7" exitCode=0 Dec 10 15:46:40 crc kubenswrapper[4669]: I1210 15:46:40.576295 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhgwg" event={"ID":"12ffa5e9-57e5-496b-94ff-c5bb1fb98706","Type":"ContainerDied","Data":"a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7"} Dec 10 15:46:40 crc kubenswrapper[4669]: I1210 15:46:40.576356 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhgwg" event={"ID":"12ffa5e9-57e5-496b-94ff-c5bb1fb98706","Type":"ContainerStarted","Data":"1cac331fffcd5a607af2f593962ca9ead7a3c94dd850c8d095ff43c0d34c9d79"} Dec 10 15:46:42 crc kubenswrapper[4669]: I1210 15:46:42.598187 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhgwg" event={"ID":"12ffa5e9-57e5-496b-94ff-c5bb1fb98706","Type":"ContainerStarted","Data":"829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51"} Dec 10 15:46:44 crc kubenswrapper[4669]: I1210 15:46:44.856293 4669 generic.go:334] "Generic (PLEG): container finished" podID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerID="829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51" exitCode=0 Dec 10 15:46:44 crc kubenswrapper[4669]: I1210 15:46:44.856422 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhgwg" event={"ID":"12ffa5e9-57e5-496b-94ff-c5bb1fb98706","Type":"ContainerDied","Data":"829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51"} Dec 10 15:46:46 crc kubenswrapper[4669]: I1210 15:46:46.875716 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhgwg" event={"ID":"12ffa5e9-57e5-496b-94ff-c5bb1fb98706","Type":"ContainerStarted","Data":"759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f"} Dec 10 15:46:46 crc kubenswrapper[4669]: I1210 15:46:46.901790 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bhgwg" podStartSLOduration=3.033398288 podStartE2EDuration="7.901747007s" podCreationTimestamp="2025-12-10 15:46:39 +0000 UTC" firstStartedPulling="2025-12-10 15:46:40.578371581 +0000 UTC m=+1574.495318198" lastFinishedPulling="2025-12-10 15:46:45.44672028 +0000 UTC m=+1579.363666917" observedRunningTime="2025-12-10 15:46:46.899031631 +0000 UTC m=+1580.815978268" watchObservedRunningTime="2025-12-10 15:46:46.901747007 +0000 UTC m=+1580.818693634" Dec 10 15:46:49 crc kubenswrapper[4669]: I1210 15:46:49.703055 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:49 crc kubenswrapper[4669]: I1210 15:46:49.703647 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:49 crc kubenswrapper[4669]: I1210 15:46:49.765001 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:58 crc kubenswrapper[4669]: I1210 15:46:58.744575 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:46:58 crc kubenswrapper[4669]: I1210 15:46:58.745085 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:46:59 crc kubenswrapper[4669]: I1210 15:46:59.747417 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:46:59 crc kubenswrapper[4669]: I1210 15:46:59.799692 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bhgwg"] Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.040595 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bhgwg" podUID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerName="registry-server" containerID="cri-o://759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f" gracePeriod=2 Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.512448 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.710049 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98f42\" (UniqueName: \"kubernetes.io/projected/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-kube-api-access-98f42\") pod \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.710107 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-catalog-content\") pod \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.710283 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-utilities\") pod \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\" (UID: \"12ffa5e9-57e5-496b-94ff-c5bb1fb98706\") " Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.711162 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-utilities" (OuterVolumeSpecName: "utilities") pod "12ffa5e9-57e5-496b-94ff-c5bb1fb98706" (UID: "12ffa5e9-57e5-496b-94ff-c5bb1fb98706"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.719498 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-kube-api-access-98f42" (OuterVolumeSpecName: "kube-api-access-98f42") pod "12ffa5e9-57e5-496b-94ff-c5bb1fb98706" (UID: "12ffa5e9-57e5-496b-94ff-c5bb1fb98706"). InnerVolumeSpecName "kube-api-access-98f42". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.763554 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "12ffa5e9-57e5-496b-94ff-c5bb1fb98706" (UID: "12ffa5e9-57e5-496b-94ff-c5bb1fb98706"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.812233 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98f42\" (UniqueName: \"kubernetes.io/projected/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-kube-api-access-98f42\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.812920 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:00 crc kubenswrapper[4669]: I1210 15:47:00.812949 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/12ffa5e9-57e5-496b-94ff-c5bb1fb98706-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.054921 4669 generic.go:334] "Generic (PLEG): container finished" podID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerID="759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f" exitCode=0 Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.054979 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhgwg" event={"ID":"12ffa5e9-57e5-496b-94ff-c5bb1fb98706","Type":"ContainerDied","Data":"759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f"} Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.055019 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bhgwg" event={"ID":"12ffa5e9-57e5-496b-94ff-c5bb1fb98706","Type":"ContainerDied","Data":"1cac331fffcd5a607af2f593962ca9ead7a3c94dd850c8d095ff43c0d34c9d79"} Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.055049 4669 scope.go:117] "RemoveContainer" containerID="759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.055262 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bhgwg" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.085898 4669 scope.go:117] "RemoveContainer" containerID="829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.133631 4669 scope.go:117] "RemoveContainer" containerID="a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.135458 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bhgwg"] Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.146768 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bhgwg"] Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.170579 4669 scope.go:117] "RemoveContainer" containerID="759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f" Dec 10 15:47:01 crc kubenswrapper[4669]: E1210 15:47:01.171192 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f\": container with ID starting with 759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f not found: ID does not exist" containerID="759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.171336 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f"} err="failed to get container status \"759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f\": rpc error: code = NotFound desc = could not find container \"759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f\": container with ID starting with 759623bee79ac85ac890c1abb1822bfeaebb3188cb2c2471fdba3a6e9df16d4f not found: ID does not exist" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.171378 4669 scope.go:117] "RemoveContainer" containerID="829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51" Dec 10 15:47:01 crc kubenswrapper[4669]: E1210 15:47:01.171750 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51\": container with ID starting with 829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51 not found: ID does not exist" containerID="829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.171784 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51"} err="failed to get container status \"829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51\": rpc error: code = NotFound desc = could not find container \"829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51\": container with ID starting with 829998da2dead77d38238840317114508b3273816a3203fcae9241e0e7e69b51 not found: ID does not exist" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.171812 4669 scope.go:117] "RemoveContainer" containerID="a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7" Dec 10 15:47:01 crc kubenswrapper[4669]: E1210 15:47:01.172087 4669 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7\": container with ID starting with a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7 not found: ID does not exist" containerID="a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7" Dec 10 15:47:01 crc kubenswrapper[4669]: I1210 15:47:01.172155 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7"} err="failed to get container status \"a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7\": rpc error: code = NotFound desc = could not find container \"a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7\": container with ID starting with a8e72b59d7bdcefe4e82c70db305a0403cc412cbe6e4ff151976708483b830d7 not found: ID does not exist" Dec 10 15:47:02 crc kubenswrapper[4669]: I1210 15:47:02.409791 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" path="/var/lib/kubelet/pods/12ffa5e9-57e5-496b-94ff-c5bb1fb98706/volumes" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.347901 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bvwf2"] Dec 10 15:47:20 crc kubenswrapper[4669]: E1210 15:47:20.349049 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerName="extract-utilities" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.349064 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerName="extract-utilities" Dec 10 15:47:20 crc kubenswrapper[4669]: E1210 15:47:20.349091 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerName="extract-content" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.349100 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerName="extract-content" Dec 10 15:47:20 crc kubenswrapper[4669]: E1210 15:47:20.349115 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerName="registry-server" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.349123 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerName="registry-server" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.349385 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="12ffa5e9-57e5-496b-94ff-c5bb1fb98706" containerName="registry-server" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.350996 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.364742 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bvwf2"] Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.490100 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5kc5\" (UniqueName: \"kubernetes.io/projected/cd322244-a7a0-44a7-9ee3-9975b8724373-kube-api-access-s5kc5\") pod \"redhat-marketplace-bvwf2\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.490431 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-utilities\") pod \"redhat-marketplace-bvwf2\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.490480 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-catalog-content\") pod \"redhat-marketplace-bvwf2\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.605575 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5kc5\" (UniqueName: \"kubernetes.io/projected/cd322244-a7a0-44a7-9ee3-9975b8724373-kube-api-access-s5kc5\") pod \"redhat-marketplace-bvwf2\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.605647 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-utilities\") pod \"redhat-marketplace-bvwf2\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.605699 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-catalog-content\") pod \"redhat-marketplace-bvwf2\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.606148 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-utilities\") pod \"redhat-marketplace-bvwf2\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.606177 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-catalog-content\") pod \"redhat-marketplace-bvwf2\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.640483 4669 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-s5kc5\" (UniqueName: \"kubernetes.io/projected/cd322244-a7a0-44a7-9ee3-9975b8724373-kube-api-access-s5kc5\") pod \"redhat-marketplace-bvwf2\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:20 crc kubenswrapper[4669]: I1210 15:47:20.707019 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:21 crc kubenswrapper[4669]: I1210 15:47:21.143894 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bvwf2"] Dec 10 15:47:21 crc kubenswrapper[4669]: I1210 15:47:21.236889 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bvwf2" event={"ID":"cd322244-a7a0-44a7-9ee3-9975b8724373","Type":"ContainerStarted","Data":"594d0acb7ac0004b4ab80cdda6abb75cb4885c2f5898a2e389db73c11acef0cd"} Dec 10 15:47:22 crc kubenswrapper[4669]: I1210 15:47:22.247206 4669 generic.go:334] "Generic (PLEG): container finished" podID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerID="38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822" exitCode=0 Dec 10 15:47:22 crc kubenswrapper[4669]: I1210 15:47:22.247303 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bvwf2" event={"ID":"cd322244-a7a0-44a7-9ee3-9975b8724373","Type":"ContainerDied","Data":"38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822"} Dec 10 15:47:24 crc kubenswrapper[4669]: I1210 15:47:24.275691 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bvwf2" event={"ID":"cd322244-a7a0-44a7-9ee3-9975b8724373","Type":"ContainerStarted","Data":"19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b"} Dec 10 15:47:24 crc kubenswrapper[4669]: I1210 15:47:24.279481 4669 generic.go:334] "Generic (PLEG): container finished" podID="2919610d-6d25-4181-a177-04920d27ee8d" containerID="559e4b97a9ce8c2bc22a1520735d244cb75a5f8347355471a3225452adb5c085" exitCode=0 Dec 10 15:47:24 crc kubenswrapper[4669]: I1210 15:47:24.279554 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" event={"ID":"2919610d-6d25-4181-a177-04920d27ee8d","Type":"ContainerDied","Data":"559e4b97a9ce8c2bc22a1520735d244cb75a5f8347355471a3225452adb5c085"} Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.292951 4669 generic.go:334] "Generic (PLEG): container finished" podID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerID="19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b" exitCode=0 Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.293060 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bvwf2" event={"ID":"cd322244-a7a0-44a7-9ee3-9975b8724373","Type":"ContainerDied","Data":"19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b"} Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.787792 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.833451 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-inventory\") pod \"2919610d-6d25-4181-a177-04920d27ee8d\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.833537 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-ssh-key\") pod \"2919610d-6d25-4181-a177-04920d27ee8d\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.833584 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7876\" (UniqueName: \"kubernetes.io/projected/2919610d-6d25-4181-a177-04920d27ee8d-kube-api-access-t7876\") pod \"2919610d-6d25-4181-a177-04920d27ee8d\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.833653 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-bootstrap-combined-ca-bundle\") pod \"2919610d-6d25-4181-a177-04920d27ee8d\" (UID: \"2919610d-6d25-4181-a177-04920d27ee8d\") " Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.839733 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2919610d-6d25-4181-a177-04920d27ee8d-kube-api-access-t7876" (OuterVolumeSpecName: "kube-api-access-t7876") pod "2919610d-6d25-4181-a177-04920d27ee8d" (UID: "2919610d-6d25-4181-a177-04920d27ee8d"). InnerVolumeSpecName "kube-api-access-t7876". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.839959 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "2919610d-6d25-4181-a177-04920d27ee8d" (UID: "2919610d-6d25-4181-a177-04920d27ee8d"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.859955 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2919610d-6d25-4181-a177-04920d27ee8d" (UID: "2919610d-6d25-4181-a177-04920d27ee8d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.869846 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-inventory" (OuterVolumeSpecName: "inventory") pod "2919610d-6d25-4181-a177-04920d27ee8d" (UID: "2919610d-6d25-4181-a177-04920d27ee8d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.937114 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7876\" (UniqueName: \"kubernetes.io/projected/2919610d-6d25-4181-a177-04920d27ee8d-kube-api-access-t7876\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.937149 4669 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.937159 4669 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:25 crc kubenswrapper[4669]: I1210 15:47:25.937167 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2919610d-6d25-4181-a177-04920d27ee8d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.303250 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" event={"ID":"2919610d-6d25-4181-a177-04920d27ee8d","Type":"ContainerDied","Data":"aa17824b9063f3744b1ba9afb6046a0efa70d09340fcb7debc1d464be2050170"} Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.303282 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-2csns" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.303541 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa17824b9063f3744b1ba9afb6046a0efa70d09340fcb7debc1d464be2050170" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.305793 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bvwf2" event={"ID":"cd322244-a7a0-44a7-9ee3-9975b8724373","Type":"ContainerStarted","Data":"22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886"} Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.325199 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bvwf2" podStartSLOduration=2.7071593160000003 podStartE2EDuration="6.325182151s" podCreationTimestamp="2025-12-10 15:47:20 +0000 UTC" firstStartedPulling="2025-12-10 15:47:22.255515352 +0000 UTC m=+1616.172461989" lastFinishedPulling="2025-12-10 15:47:25.873538187 +0000 UTC m=+1619.790484824" observedRunningTime="2025-12-10 15:47:26.323434898 +0000 UTC m=+1620.240381545" watchObservedRunningTime="2025-12-10 15:47:26.325182151 +0000 UTC m=+1620.242128778" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.429915 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9"] Dec 10 15:47:26 crc kubenswrapper[4669]: E1210 15:47:26.432165 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2919610d-6d25-4181-a177-04920d27ee8d" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.432192 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="2919610d-6d25-4181-a177-04920d27ee8d" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 10 15:47:26 crc kubenswrapper[4669]: 
I1210 15:47:26.432406 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="2919610d-6d25-4181-a177-04920d27ee8d" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.433003 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.437427 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.437874 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.438044 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.438234 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.457341 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9"] Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.551156 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.551285 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnslr\" (UniqueName: \"kubernetes.io/projected/e1113280-5934-4d11-9449-b96a953ca8d4-kube-api-access-cnslr\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.551368 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.653240 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnslr\" (UniqueName: \"kubernetes.io/projected/e1113280-5934-4d11-9449-b96a953ca8d4-kube-api-access-cnslr\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.653789 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " 
pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.653942 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.658709 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.660617 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.673873 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnslr\" (UniqueName: \"kubernetes.io/projected/e1113280-5934-4d11-9449-b96a953ca8d4-kube-api-access-cnslr\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:26 crc kubenswrapper[4669]: I1210 15:47:26.761015 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:47:27 crc kubenswrapper[4669]: I1210 15:47:27.305387 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9"] Dec 10 15:47:27 crc kubenswrapper[4669]: I1210 15:47:27.316419 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" event={"ID":"e1113280-5934-4d11-9449-b96a953ca8d4","Type":"ContainerStarted","Data":"b9575eca7ecf525f01544abb135d42960ccac37b673b79c9be8c2565ebe011a4"} Dec 10 15:47:28 crc kubenswrapper[4669]: I1210 15:47:28.325833 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" event={"ID":"e1113280-5934-4d11-9449-b96a953ca8d4","Type":"ContainerStarted","Data":"b4a26934bbcb88a76fbb30550cacce7bee43e2dcbeeca090322cbbf9e681863e"} Dec 10 15:47:28 crc kubenswrapper[4669]: I1210 15:47:28.349753 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" podStartSLOduration=1.702275636 podStartE2EDuration="2.349733799s" podCreationTimestamp="2025-12-10 15:47:26 +0000 UTC" firstStartedPulling="2025-12-10 15:47:27.294177283 +0000 UTC m=+1621.211123910" lastFinishedPulling="2025-12-10 15:47:27.941635446 +0000 UTC m=+1621.858582073" observedRunningTime="2025-12-10 15:47:28.341356754 +0000 UTC m=+1622.258303401" watchObservedRunningTime="2025-12-10 15:47:28.349733799 +0000 UTC m=+1622.266680426" Dec 10 15:47:28 crc kubenswrapper[4669]: I1210 15:47:28.744684 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:47:28 crc kubenswrapper[4669]: I1210 15:47:28.744749 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.011672 4669 scope.go:117] "RemoveContainer" containerID="bb4e61b3dba556ade94d67f70b1bc59d8944b8adc8dd7a0ce42a3f4da1402620" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.034277 4669 scope.go:117] "RemoveContainer" containerID="f502b646e18d906ab39dfad37d1f7a8641a37e67cebd79f00847c45a797d28a0" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.058785 4669 scope.go:117] "RemoveContainer" containerID="f596fbb51f2a179a88451c082490d0887b740e07d523a0bb028ccd00d47857cc" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.422677 4669 scope.go:117] "RemoveContainer" containerID="75e0bf266f468918f4c7265326b9f377b24b3e615bab4788dcd15991afdad1f6" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.482614 4669 scope.go:117] "RemoveContainer" containerID="5ff8ab0470627842478306d2b84e3ebc29e317b7a3ada9b0a22c4668746502b5" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.514399 4669 scope.go:117] "RemoveContainer" containerID="284ae53b1e774bb72009fdd09a4234705f200a798d5570a7b662a12f3122d8d6" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.578533 4669 scope.go:117] 
"RemoveContainer" containerID="0f7ea90e4bf87199a4e0ae5a6f13f1527bc1946c014bb239d8698cce3066ed8c" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.708695 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.708748 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:30 crc kubenswrapper[4669]: I1210 15:47:30.769206 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:31 crc kubenswrapper[4669]: I1210 15:47:31.408541 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:31 crc kubenswrapper[4669]: I1210 15:47:31.473288 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bvwf2"] Dec 10 15:47:33 crc kubenswrapper[4669]: I1210 15:47:33.382871 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bvwf2" podUID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerName="registry-server" containerID="cri-o://22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886" gracePeriod=2 Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.286450 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.404980 4669 generic.go:334] "Generic (PLEG): container finished" podID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerID="22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886" exitCode=0 Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.405094 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bvwf2" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.410888 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5kc5\" (UniqueName: \"kubernetes.io/projected/cd322244-a7a0-44a7-9ee3-9975b8724373-kube-api-access-s5kc5\") pod \"cd322244-a7a0-44a7-9ee3-9975b8724373\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.410988 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-catalog-content\") pod \"cd322244-a7a0-44a7-9ee3-9975b8724373\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.411015 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-utilities\") pod \"cd322244-a7a0-44a7-9ee3-9975b8724373\" (UID: \"cd322244-a7a0-44a7-9ee3-9975b8724373\") " Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.412427 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-utilities" (OuterVolumeSpecName: "utilities") pod "cd322244-a7a0-44a7-9ee3-9975b8724373" (UID: "cd322244-a7a0-44a7-9ee3-9975b8724373"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.413291 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bvwf2" event={"ID":"cd322244-a7a0-44a7-9ee3-9975b8724373","Type":"ContainerDied","Data":"22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886"} Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.413356 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bvwf2" event={"ID":"cd322244-a7a0-44a7-9ee3-9975b8724373","Type":"ContainerDied","Data":"594d0acb7ac0004b4ab80cdda6abb75cb4885c2f5898a2e389db73c11acef0cd"} Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.413388 4669 scope.go:117] "RemoveContainer" containerID="22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.426488 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd322244-a7a0-44a7-9ee3-9975b8724373-kube-api-access-s5kc5" (OuterVolumeSpecName: "kube-api-access-s5kc5") pod "cd322244-a7a0-44a7-9ee3-9975b8724373" (UID: "cd322244-a7a0-44a7-9ee3-9975b8724373"). InnerVolumeSpecName "kube-api-access-s5kc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.443157 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cd322244-a7a0-44a7-9ee3-9975b8724373" (UID: "cd322244-a7a0-44a7-9ee3-9975b8724373"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.473422 4669 scope.go:117] "RemoveContainer" containerID="19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.493272 4669 scope.go:117] "RemoveContainer" containerID="38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.513361 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5kc5\" (UniqueName: \"kubernetes.io/projected/cd322244-a7a0-44a7-9ee3-9975b8724373-kube-api-access-s5kc5\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.513399 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.513412 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cd322244-a7a0-44a7-9ee3-9975b8724373-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.543895 4669 scope.go:117] "RemoveContainer" containerID="22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886" Dec 10 15:47:34 crc kubenswrapper[4669]: E1210 15:47:34.544604 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886\": container with ID starting with 22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886 not found: ID does not exist" 
containerID="22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.544712 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886"} err="failed to get container status \"22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886\": rpc error: code = NotFound desc = could not find container \"22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886\": container with ID starting with 22233badeb313bdbfc0c38afdf5af1cd4c252b0ed2b9bffb1295341d8f6b9886 not found: ID does not exist" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.544773 4669 scope.go:117] "RemoveContainer" containerID="19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b" Dec 10 15:47:34 crc kubenswrapper[4669]: E1210 15:47:34.549598 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b\": container with ID starting with 19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b not found: ID does not exist" containerID="19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.549648 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b"} err="failed to get container status \"19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b\": rpc error: code = NotFound desc = could not find container \"19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b\": container with ID starting with 19cac1a9757bebb89624b0a2e4222218e922552a60cb4acc740496aa90beaa5b not found: ID does not exist" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.549691 4669 scope.go:117] "RemoveContainer" containerID="38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822" Dec 10 15:47:34 crc kubenswrapper[4669]: E1210 15:47:34.549995 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822\": container with ID starting with 38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822 not found: ID does not exist" containerID="38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.550028 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822"} err="failed to get container status \"38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822\": rpc error: code = NotFound desc = could not find container \"38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822\": container with ID starting with 38a1c8a3effbb9c1245bb558a9ba91e39e05deaa59b61c78d2c5699e642ca822 not found: ID does not exist" Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.736859 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bvwf2"] Dec 10 15:47:34 crc kubenswrapper[4669]: I1210 15:47:34.749276 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bvwf2"] Dec 10 15:47:36 crc kubenswrapper[4669]: I1210 15:47:36.415854 
4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd322244-a7a0-44a7-9ee3-9975b8724373" path="/var/lib/kubelet/pods/cd322244-a7a0-44a7-9ee3-9975b8724373/volumes" Dec 10 15:47:41 crc kubenswrapper[4669]: I1210 15:47:41.047724 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-cc10-account-create-update-f46ph"] Dec 10 15:47:41 crc kubenswrapper[4669]: I1210 15:47:41.058452 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-cc10-account-create-update-f46ph"] Dec 10 15:47:42 crc kubenswrapper[4669]: I1210 15:47:42.410114 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4541fbbe-63d6-478b-9fc6-90eaaa8f67a8" path="/var/lib/kubelet/pods/4541fbbe-63d6-478b-9fc6-90eaaa8f67a8/volumes" Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.043107 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-nh95v"] Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.056674 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-beb3-account-create-update-ckz64"] Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.067098 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-nh95v"] Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.081147 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-d8v5p"] Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.089472 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-beb3-account-create-update-ckz64"] Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.098842 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-d8v5p"] Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.105956 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6e21-account-create-update-qrl4t"] Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.112841 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-6e21-account-create-update-qrl4t"] Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.410792 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="082348c7-f4ba-4369-a41e-d633a92ef9ec" path="/var/lib/kubelet/pods/082348c7-f4ba-4369-a41e-d633a92ef9ec/volumes" Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.411489 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="343b34aa-07cb-45f2-a070-b64466e0d681" path="/var/lib/kubelet/pods/343b34aa-07cb-45f2-a070-b64466e0d681/volumes" Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.412005 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="968aa77c-143b-4324-9736-6b9698cc2867" path="/var/lib/kubelet/pods/968aa77c-143b-4324-9736-6b9698cc2867/volumes" Dec 10 15:47:46 crc kubenswrapper[4669]: I1210 15:47:46.412706 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c607262e-1448-458e-9135-1581237f17e7" path="/var/lib/kubelet/pods/c607262e-1448-458e-9135-1581237f17e7/volumes" Dec 10 15:47:51 crc kubenswrapper[4669]: I1210 15:47:51.035998 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-nt2bl"] Dec 10 15:47:51 crc kubenswrapper[4669]: I1210 15:47:51.048529 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-nt2bl"] Dec 10 15:47:52 crc kubenswrapper[4669]: I1210 15:47:52.419570 4669 kubelet_volumes.go:163] 
"Cleaned up orphaned pod volumes dir" podUID="4a6b5304-e7ea-4f27-b68e-20da85f0f6f0" path="/var/lib/kubelet/pods/4a6b5304-e7ea-4f27-b68e-20da85f0f6f0/volumes" Dec 10 15:47:58 crc kubenswrapper[4669]: I1210 15:47:58.744989 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:47:58 crc kubenswrapper[4669]: I1210 15:47:58.745725 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:47:58 crc kubenswrapper[4669]: I1210 15:47:58.745799 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:47:58 crc kubenswrapper[4669]: I1210 15:47:58.746880 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:47:58 crc kubenswrapper[4669]: I1210 15:47:58.746979 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" gracePeriod=600 Dec 10 15:47:59 crc kubenswrapper[4669]: E1210 15:47:59.374613 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:47:59 crc kubenswrapper[4669]: I1210 15:47:59.642389 4669 generic.go:334] "Generic (PLEG): container finished" podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" exitCode=0 Dec 10 15:47:59 crc kubenswrapper[4669]: I1210 15:47:59.642585 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"} Dec 10 15:47:59 crc kubenswrapper[4669]: I1210 15:47:59.642840 4669 scope.go:117] "RemoveContainer" containerID="7c083e375e78bcc55b89081b1b91303b8145d8a9d38c789b9d9b1d750a62bcfb" Dec 10 15:47:59 crc kubenswrapper[4669]: I1210 15:47:59.643458 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:47:59 crc kubenswrapper[4669]: E1210 15:47:59.643901 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:48:13 crc kubenswrapper[4669]: I1210 15:48:13.398482 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:48:13 crc kubenswrapper[4669]: E1210 15:48:13.399609 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:48:16 crc kubenswrapper[4669]: I1210 15:48:16.064407 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-d8jx6"] Dec 10 15:48:16 crc kubenswrapper[4669]: I1210 15:48:16.080621 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-d8jx6"] Dec 10 15:48:16 crc kubenswrapper[4669]: I1210 15:48:16.407347 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f0228ea-8dff-4494-925c-db481a3235e8" path="/var/lib/kubelet/pods/2f0228ea-8dff-4494-925c-db481a3235e8/volumes" Dec 10 15:48:23 crc kubenswrapper[4669]: I1210 15:48:23.031757 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-2xgm4"] Dec 10 15:48:23 crc kubenswrapper[4669]: I1210 15:48:23.041923 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-6e58-account-create-update-kp9vk"] Dec 10 15:48:23 crc kubenswrapper[4669]: I1210 15:48:23.051854 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-2xgm4"] Dec 10 15:48:23 crc kubenswrapper[4669]: I1210 15:48:23.059348 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-6e58-account-create-update-kp9vk"] Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.031583 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-vhhn7"] Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.040811 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-a904-account-create-update-nz8ns"] Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.048824 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-s4fqw"] Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.057452 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-92c4-account-create-update-dchgr"] Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.064788 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-a904-account-create-update-nz8ns"] Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.071350 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-92c4-account-create-update-dchgr"] Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.077629 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-vhhn7"] Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.084120 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/barbican-db-create-s4fqw"] Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.412273 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ee39beb-9369-4750-8c57-2afa1e96029e" path="/var/lib/kubelet/pods/4ee39beb-9369-4750-8c57-2afa1e96029e/volumes" Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.413167 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55391db7-2483-46c6-9d71-0915d81eb5ee" path="/var/lib/kubelet/pods/55391db7-2483-46c6-9d71-0915d81eb5ee/volumes" Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.414042 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="639a743d-6218-4ed2-82b1-e898c97906e8" path="/var/lib/kubelet/pods/639a743d-6218-4ed2-82b1-e898c97906e8/volumes" Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.414969 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7125626d-5fb6-43fc-ade4-912d54fb0b76" path="/var/lib/kubelet/pods/7125626d-5fb6-43fc-ade4-912d54fb0b76/volumes" Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.416512 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c21c00ce-da75-42fb-998d-d41f116d076f" path="/var/lib/kubelet/pods/c21c00ce-da75-42fb-998d-d41f116d076f/volumes" Dec 10 15:48:24 crc kubenswrapper[4669]: I1210 15:48:24.417475 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5f2fd35-f21f-41d2-a254-3460f66af1f3" path="/var/lib/kubelet/pods/f5f2fd35-f21f-41d2-a254-3460f66af1f3/volumes" Dec 10 15:48:27 crc kubenswrapper[4669]: I1210 15:48:27.398426 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:48:27 crc kubenswrapper[4669]: E1210 15:48:27.399410 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:48:30 crc kubenswrapper[4669]: I1210 15:48:30.715386 4669 scope.go:117] "RemoveContainer" containerID="1d9c2494a29ed905770e6ac5033471f8046970e9721c11e102815caaa6bd0b03" Dec 10 15:48:30 crc kubenswrapper[4669]: I1210 15:48:30.739980 4669 scope.go:117] "RemoveContainer" containerID="7441f745a917595047737e61341236a737ce951642885aa415102c63fce431c1" Dec 10 15:48:30 crc kubenswrapper[4669]: I1210 15:48:30.809753 4669 scope.go:117] "RemoveContainer" containerID="39c338317eb1c5903a455654e5c02cf5cda10309cd7b5c09ba036aa7ebe9b4a1" Dec 10 15:48:30 crc kubenswrapper[4669]: I1210 15:48:30.848069 4669 scope.go:117] "RemoveContainer" containerID="8a5474a6efb65ced282a5247b68d6dc1b8038e2da99eb63e77c694961b1db6e4" Dec 10 15:48:30 crc kubenswrapper[4669]: I1210 15:48:30.886463 4669 scope.go:117] "RemoveContainer" containerID="2d589bd7fa4cd27fc2e5da50066b57d7ee094b8a9f4829123f9abfafb9909c3f" Dec 10 15:48:30 crc kubenswrapper[4669]: I1210 15:48:30.920482 4669 scope.go:117] "RemoveContainer" containerID="634a6f021233b89894ca2fa0a80329f99cac1f892f38991df2ffa9cbc003dd55" Dec 10 15:48:30 crc kubenswrapper[4669]: I1210 15:48:30.972162 4669 scope.go:117] "RemoveContainer" containerID="c2585fbec4d72d936e175c42bc682b344d0b008e1a6238d2d861b8d3de1f23f2" Dec 10 15:48:30 crc kubenswrapper[4669]: I1210 15:48:30.991099 4669 scope.go:117] 
"RemoveContainer" containerID="98f09f7d8047752cfb8a0155a415e8f73bdb5cc5b16223ff4302de518ef77ef9" Dec 10 15:48:31 crc kubenswrapper[4669]: I1210 15:48:31.018263 4669 scope.go:117] "RemoveContainer" containerID="d62249d2b6aa40b34211d9709ebe8bfc27d361e819cb2f1aafc654b80a9f1ff2" Dec 10 15:48:31 crc kubenswrapper[4669]: I1210 15:48:31.048836 4669 scope.go:117] "RemoveContainer" containerID="fe7878214509e235c1e469cb35161c6ea9ae7b7be6d0f0cdba7dc381b38f2917" Dec 10 15:48:31 crc kubenswrapper[4669]: I1210 15:48:31.070931 4669 scope.go:117] "RemoveContainer" containerID="b80f73ad83f7ad7ab81d4fecef4f7e80e06a0c5defaff50d432db01970dfca13" Dec 10 15:48:31 crc kubenswrapper[4669]: I1210 15:48:31.096931 4669 scope.go:117] "RemoveContainer" containerID="a5e1ce484fe54aa0d64c4bcd7705e74501dd705c5fd85c8f02db5e44be07ec2c" Dec 10 15:48:31 crc kubenswrapper[4669]: I1210 15:48:31.120617 4669 scope.go:117] "RemoveContainer" containerID="1d5c73f914f1acf2d5f510466c422834680ac55d4982b523dbc4ae1c165bd4e5" Dec 10 15:48:35 crc kubenswrapper[4669]: I1210 15:48:35.040882 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-k9llm"] Dec 10 15:48:35 crc kubenswrapper[4669]: I1210 15:48:35.048120 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-k9llm"] Dec 10 15:48:36 crc kubenswrapper[4669]: I1210 15:48:36.407596 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f5ebc22-6893-415d-906b-bea9f82f18f4" path="/var/lib/kubelet/pods/9f5ebc22-6893-415d-906b-bea9f82f18f4/volumes" Dec 10 15:48:42 crc kubenswrapper[4669]: I1210 15:48:42.398042 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:48:42 crc kubenswrapper[4669]: E1210 15:48:42.398770 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:48:52 crc kubenswrapper[4669]: I1210 15:48:52.249168 4669 generic.go:334] "Generic (PLEG): container finished" podID="e1113280-5934-4d11-9449-b96a953ca8d4" containerID="b4a26934bbcb88a76fbb30550cacce7bee43e2dcbeeca090322cbbf9e681863e" exitCode=0 Dec 10 15:48:52 crc kubenswrapper[4669]: I1210 15:48:52.249255 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" event={"ID":"e1113280-5934-4d11-9449-b96a953ca8d4","Type":"ContainerDied","Data":"b4a26934bbcb88a76fbb30550cacce7bee43e2dcbeeca090322cbbf9e681863e"} Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.650509 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.753668 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnslr\" (UniqueName: \"kubernetes.io/projected/e1113280-5934-4d11-9449-b96a953ca8d4-kube-api-access-cnslr\") pod \"e1113280-5934-4d11-9449-b96a953ca8d4\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.753723 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-inventory\") pod \"e1113280-5934-4d11-9449-b96a953ca8d4\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.753991 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-ssh-key\") pod \"e1113280-5934-4d11-9449-b96a953ca8d4\" (UID: \"e1113280-5934-4d11-9449-b96a953ca8d4\") " Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.760427 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1113280-5934-4d11-9449-b96a953ca8d4-kube-api-access-cnslr" (OuterVolumeSpecName: "kube-api-access-cnslr") pod "e1113280-5934-4d11-9449-b96a953ca8d4" (UID: "e1113280-5934-4d11-9449-b96a953ca8d4"). InnerVolumeSpecName "kube-api-access-cnslr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.781406 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e1113280-5934-4d11-9449-b96a953ca8d4" (UID: "e1113280-5934-4d11-9449-b96a953ca8d4"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.790260 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-inventory" (OuterVolumeSpecName: "inventory") pod "e1113280-5934-4d11-9449-b96a953ca8d4" (UID: "e1113280-5934-4d11-9449-b96a953ca8d4"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.857149 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.857189 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnslr\" (UniqueName: \"kubernetes.io/projected/e1113280-5934-4d11-9449-b96a953ca8d4-kube-api-access-cnslr\") on node \"crc\" DevicePath \"\"" Dec 10 15:48:53 crc kubenswrapper[4669]: I1210 15:48:53.857205 4669 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e1113280-5934-4d11-9449-b96a953ca8d4-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.268075 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" event={"ID":"e1113280-5934-4d11-9449-b96a953ca8d4","Type":"ContainerDied","Data":"b9575eca7ecf525f01544abb135d42960ccac37b673b79c9be8c2565ebe011a4"} Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.268123 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9575eca7ecf525f01544abb135d42960ccac37b673b79c9be8c2565ebe011a4" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.268171 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.360819 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg"] Dec 10 15:48:54 crc kubenswrapper[4669]: E1210 15:48:54.361280 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerName="extract-utilities" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.361309 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerName="extract-utilities" Dec 10 15:48:54 crc kubenswrapper[4669]: E1210 15:48:54.361335 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1113280-5934-4d11-9449-b96a953ca8d4" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.361345 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1113280-5934-4d11-9449-b96a953ca8d4" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 10 15:48:54 crc kubenswrapper[4669]: E1210 15:48:54.361379 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerName="registry-server" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.361389 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerName="registry-server" Dec 10 15:48:54 crc kubenswrapper[4669]: E1210 15:48:54.361410 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerName="extract-content" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.361418 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerName="extract-content" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.361626 4669 
memory_manager.go:354] "RemoveStaleState removing state" podUID="cd322244-a7a0-44a7-9ee3-9975b8724373" containerName="registry-server" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.361651 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1113280-5934-4d11-9449-b96a953ca8d4" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.362331 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.364646 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.364734 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.365017 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ttg2\" (UniqueName: \"kubernetes.io/projected/63939037-98a0-4152-95a4-5a64323a3ee3-kube-api-access-8ttg2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.369497 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.369823 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.373355 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.378574 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.379632 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg"] Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.466202 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.467008 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.467396 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ttg2\" (UniqueName: \"kubernetes.io/projected/63939037-98a0-4152-95a4-5a64323a3ee3-kube-api-access-8ttg2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.473825 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.483716 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.488623 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ttg2\" (UniqueName: \"kubernetes.io/projected/63939037-98a0-4152-95a4-5a64323a3ee3-kube-api-access-8ttg2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:54 crc kubenswrapper[4669]: I1210 15:48:54.724125 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:48:55 crc kubenswrapper[4669]: I1210 15:48:55.313568 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg"] Dec 10 15:48:56 crc kubenswrapper[4669]: I1210 15:48:56.285908 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" event={"ID":"63939037-98a0-4152-95a4-5a64323a3ee3","Type":"ContainerStarted","Data":"0046e154de2d36aa028956391e1376c6114b937abce4728387d0f7f90febd1e3"} Dec 10 15:48:56 crc kubenswrapper[4669]: I1210 15:48:56.286197 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" event={"ID":"63939037-98a0-4152-95a4-5a64323a3ee3","Type":"ContainerStarted","Data":"b1df210019d8394d467dc9e085cac3a2be909243baeb02c04524dc5df77e6684"} Dec 10 15:48:56 crc kubenswrapper[4669]: I1210 15:48:56.313177 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" podStartSLOduration=1.803620725 podStartE2EDuration="2.313157922s" podCreationTimestamp="2025-12-10 15:48:54 +0000 UTC" firstStartedPulling="2025-12-10 15:48:55.312747485 +0000 UTC m=+1709.229694112" lastFinishedPulling="2025-12-10 15:48:55.822284682 +0000 UTC m=+1709.739231309" observedRunningTime="2025-12-10 15:48:56.30897897 +0000 UTC m=+1710.225925637" watchObservedRunningTime="2025-12-10 15:48:56.313157922 +0000 UTC m=+1710.230104569" Dec 10 15:48:57 crc kubenswrapper[4669]: I1210 15:48:57.398532 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:48:57 crc kubenswrapper[4669]: E1210 15:48:57.399593 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:49:00 crc kubenswrapper[4669]: I1210 15:49:00.063532 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-jj4m7"] Dec 10 15:49:00 crc kubenswrapper[4669]: I1210 15:49:00.080974 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-jj4m7"] Dec 10 15:49:00 crc kubenswrapper[4669]: I1210 15:49:00.408208 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0071bd3b-2920-4ba4-bf6e-8d1cacac2591" path="/var/lib/kubelet/pods/0071bd3b-2920-4ba4-bf6e-8d1cacac2591/volumes" Dec 10 15:49:01 crc kubenswrapper[4669]: I1210 15:49:01.332509 4669 generic.go:334] "Generic (PLEG): container finished" podID="63939037-98a0-4152-95a4-5a64323a3ee3" containerID="0046e154de2d36aa028956391e1376c6114b937abce4728387d0f7f90febd1e3" exitCode=0 Dec 10 15:49:01 crc kubenswrapper[4669]: I1210 15:49:01.332572 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" event={"ID":"63939037-98a0-4152-95a4-5a64323a3ee3","Type":"ContainerDied","Data":"0046e154de2d36aa028956391e1376c6114b937abce4728387d0f7f90febd1e3"} Dec 10 15:49:02 crc kubenswrapper[4669]: I1210 15:49:02.758279 4669 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:49:02 crc kubenswrapper[4669]: I1210 15:49:02.941103 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ttg2\" (UniqueName: \"kubernetes.io/projected/63939037-98a0-4152-95a4-5a64323a3ee3-kube-api-access-8ttg2\") pod \"63939037-98a0-4152-95a4-5a64323a3ee3\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " Dec 10 15:49:02 crc kubenswrapper[4669]: I1210 15:49:02.941265 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-ssh-key\") pod \"63939037-98a0-4152-95a4-5a64323a3ee3\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " Dec 10 15:49:02 crc kubenswrapper[4669]: I1210 15:49:02.941325 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-inventory\") pod \"63939037-98a0-4152-95a4-5a64323a3ee3\" (UID: \"63939037-98a0-4152-95a4-5a64323a3ee3\") " Dec 10 15:49:02 crc kubenswrapper[4669]: I1210 15:49:02.947005 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63939037-98a0-4152-95a4-5a64323a3ee3-kube-api-access-8ttg2" (OuterVolumeSpecName: "kube-api-access-8ttg2") pod "63939037-98a0-4152-95a4-5a64323a3ee3" (UID: "63939037-98a0-4152-95a4-5a64323a3ee3"). InnerVolumeSpecName "kube-api-access-8ttg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:49:02 crc kubenswrapper[4669]: I1210 15:49:02.973468 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-inventory" (OuterVolumeSpecName: "inventory") pod "63939037-98a0-4152-95a4-5a64323a3ee3" (UID: "63939037-98a0-4152-95a4-5a64323a3ee3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.009487 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "63939037-98a0-4152-95a4-5a64323a3ee3" (UID: "63939037-98a0-4152-95a4-5a64323a3ee3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.044241 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.044274 4669 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/63939037-98a0-4152-95a4-5a64323a3ee3-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.044286 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ttg2\" (UniqueName: \"kubernetes.io/projected/63939037-98a0-4152-95a4-5a64323a3ee3-kube-api-access-8ttg2\") on node \"crc\" DevicePath \"\"" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.347623 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" event={"ID":"63939037-98a0-4152-95a4-5a64323a3ee3","Type":"ContainerDied","Data":"b1df210019d8394d467dc9e085cac3a2be909243baeb02c04524dc5df77e6684"} Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.349066 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1df210019d8394d467dc9e085cac3a2be909243baeb02c04524dc5df77e6684" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.347882 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.453856 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r"] Dec 10 15:49:03 crc kubenswrapper[4669]: E1210 15:49:03.454439 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63939037-98a0-4152-95a4-5a64323a3ee3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.454467 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="63939037-98a0-4152-95a4-5a64323a3ee3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.454807 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="63939037-98a0-4152-95a4-5a64323a3ee3" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.455646 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.458695 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.459634 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.460038 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.460651 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.475354 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r"] Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.655050 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-htq5r\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.655106 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hb82g\" (UniqueName: \"kubernetes.io/projected/f9c2de92-64ab-47c4-af56-0422c30259e2-kube-api-access-hb82g\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-htq5r\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.655252 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-htq5r\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.756617 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-htq5r\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.756671 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hb82g\" (UniqueName: \"kubernetes.io/projected/f9c2de92-64ab-47c4-af56-0422c30259e2-kube-api-access-hb82g\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-htq5r\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.756735 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-htq5r\" (UID: 
\"f9c2de92-64ab-47c4-af56-0422c30259e2\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.761406 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-htq5r\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.762369 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-htq5r\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:03 crc kubenswrapper[4669]: I1210 15:49:03.789744 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hb82g\" (UniqueName: \"kubernetes.io/projected/f9c2de92-64ab-47c4-af56-0422c30259e2-kube-api-access-hb82g\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-htq5r\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:04 crc kubenswrapper[4669]: I1210 15:49:04.083303 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:04 crc kubenswrapper[4669]: I1210 15:49:04.655634 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r"] Dec 10 15:49:05 crc kubenswrapper[4669]: I1210 15:49:05.380544 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" event={"ID":"f9c2de92-64ab-47c4-af56-0422c30259e2","Type":"ContainerStarted","Data":"3b34d371fd8d54962717b3bd8dd4a48de7bbfa9967d818ac8e826105b9e3fb5d"} Dec 10 15:49:06 crc kubenswrapper[4669]: I1210 15:49:06.392822 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" event={"ID":"f9c2de92-64ab-47c4-af56-0422c30259e2","Type":"ContainerStarted","Data":"377abbe37fe1df7cafe74819bad0a9c524ad52db33e9c0ea0574fa4a11be30d2"} Dec 10 15:49:06 crc kubenswrapper[4669]: I1210 15:49:06.412189 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" podStartSLOduration=2.839926931 podStartE2EDuration="3.412170209s" podCreationTimestamp="2025-12-10 15:49:03 +0000 UTC" firstStartedPulling="2025-12-10 15:49:04.669085322 +0000 UTC m=+1718.586031949" lastFinishedPulling="2025-12-10 15:49:05.2413286 +0000 UTC m=+1719.158275227" observedRunningTime="2025-12-10 15:49:06.411049441 +0000 UTC m=+1720.327996058" watchObservedRunningTime="2025-12-10 15:49:06.412170209 +0000 UTC m=+1720.329116836" Dec 10 15:49:10 crc kubenswrapper[4669]: I1210 15:49:10.399143 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:49:10 crc kubenswrapper[4669]: E1210 15:49:10.401134 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:49:11 crc kubenswrapper[4669]: I1210 15:49:11.028532 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-7w6wd"] Dec 10 15:49:11 crc kubenswrapper[4669]: I1210 15:49:11.038957 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-7w6wd"] Dec 10 15:49:11 crc kubenswrapper[4669]: I1210 15:49:11.048546 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-2w6bt"] Dec 10 15:49:11 crc kubenswrapper[4669]: I1210 15:49:11.056809 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-2w6bt"] Dec 10 15:49:11 crc kubenswrapper[4669]: I1210 15:49:11.063959 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-hqkqq"] Dec 10 15:49:11 crc kubenswrapper[4669]: I1210 15:49:11.071850 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-hqkqq"] Dec 10 15:49:12 crc kubenswrapper[4669]: I1210 15:49:12.406650 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56b87d2e-68a9-42d1-87c5-68d6010539ea" path="/var/lib/kubelet/pods/56b87d2e-68a9-42d1-87c5-68d6010539ea/volumes" Dec 10 15:49:12 crc kubenswrapper[4669]: I1210 15:49:12.407804 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c69a866b-0a6c-446d-aeea-24a9a6e95efa" path="/var/lib/kubelet/pods/c69a866b-0a6c-446d-aeea-24a9a6e95efa/volumes" Dec 10 15:49:12 crc kubenswrapper[4669]: I1210 15:49:12.408514 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9641c8e-77a7-47c5-b7f4-16d6e7061c5f" path="/var/lib/kubelet/pods/c9641c8e-77a7-47c5-b7f4-16d6e7061c5f/volumes" Dec 10 15:49:22 crc kubenswrapper[4669]: I1210 15:49:22.398516 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:49:22 crc kubenswrapper[4669]: E1210 15:49:22.399264 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:49:27 crc kubenswrapper[4669]: I1210 15:49:27.039396 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-8cqw2"] Dec 10 15:49:27 crc kubenswrapper[4669]: I1210 15:49:27.052348 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-8cqw2"] Dec 10 15:49:28 crc kubenswrapper[4669]: I1210 15:49:28.422631 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20e32d2e-0738-45f0-bb91-b8e48694928b" path="/var/lib/kubelet/pods/20e32d2e-0738-45f0-bb91-b8e48694928b/volumes" Dec 10 15:49:31 crc kubenswrapper[4669]: I1210 15:49:31.347306 4669 scope.go:117] "RemoveContainer" containerID="c37bd5aa7e9c21d27ec9dc5f4911c5cfc44adaac77bc8321302741a037bf2d08" Dec 10 15:49:31 crc kubenswrapper[4669]: I1210 15:49:31.392027 4669 scope.go:117] "RemoveContainer" containerID="087f49333541075c99219f12683e02fd8739949b2ef01ad6b8f3afedd9a804f5" Dec 
10 15:49:31 crc kubenswrapper[4669]: I1210 15:49:31.433893 4669 scope.go:117] "RemoveContainer" containerID="0fa4f99a89b4fbe2866d4865e468969e0bd0d431267a1e8e8ed384e991f7ef9d" Dec 10 15:49:31 crc kubenswrapper[4669]: I1210 15:49:31.525049 4669 scope.go:117] "RemoveContainer" containerID="393da6634b1d6ea86e46c5f6cbe177464e847be19f2ca3640cf1cfe541d97280" Dec 10 15:49:31 crc kubenswrapper[4669]: I1210 15:49:31.557734 4669 scope.go:117] "RemoveContainer" containerID="b6e7fc81b5a184992accb517e648b13e03db3428eaaf7b672c5d9e87023e6adf" Dec 10 15:49:31 crc kubenswrapper[4669]: I1210 15:49:31.598525 4669 scope.go:117] "RemoveContainer" containerID="9d4ec21be6882f695adae6225c4e2409e150ae6980432998e5b3afaba137946f" Dec 10 15:49:33 crc kubenswrapper[4669]: I1210 15:49:33.398515 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:49:33 crc kubenswrapper[4669]: E1210 15:49:33.398914 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:49:48 crc kubenswrapper[4669]: I1210 15:49:48.398098 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:49:48 crc kubenswrapper[4669]: E1210 15:49:48.399323 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:49:48 crc kubenswrapper[4669]: I1210 15:49:48.777021 4669 generic.go:334] "Generic (PLEG): container finished" podID="f9c2de92-64ab-47c4-af56-0422c30259e2" containerID="377abbe37fe1df7cafe74819bad0a9c524ad52db33e9c0ea0574fa4a11be30d2" exitCode=0 Dec 10 15:49:48 crc kubenswrapper[4669]: I1210 15:49:48.777122 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" event={"ID":"f9c2de92-64ab-47c4-af56-0422c30259e2","Type":"ContainerDied","Data":"377abbe37fe1df7cafe74819bad0a9c524ad52db33e9c0ea0574fa4a11be30d2"} Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.228250 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.391842 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hb82g\" (UniqueName: \"kubernetes.io/projected/f9c2de92-64ab-47c4-af56-0422c30259e2-kube-api-access-hb82g\") pod \"f9c2de92-64ab-47c4-af56-0422c30259e2\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.392343 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-ssh-key\") pod \"f9c2de92-64ab-47c4-af56-0422c30259e2\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.392556 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-inventory\") pod \"f9c2de92-64ab-47c4-af56-0422c30259e2\" (UID: \"f9c2de92-64ab-47c4-af56-0422c30259e2\") " Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.402589 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9c2de92-64ab-47c4-af56-0422c30259e2-kube-api-access-hb82g" (OuterVolumeSpecName: "kube-api-access-hb82g") pod "f9c2de92-64ab-47c4-af56-0422c30259e2" (UID: "f9c2de92-64ab-47c4-af56-0422c30259e2"). InnerVolumeSpecName "kube-api-access-hb82g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.429414 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-inventory" (OuterVolumeSpecName: "inventory") pod "f9c2de92-64ab-47c4-af56-0422c30259e2" (UID: "f9c2de92-64ab-47c4-af56-0422c30259e2"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.438570 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f9c2de92-64ab-47c4-af56-0422c30259e2" (UID: "f9c2de92-64ab-47c4-af56-0422c30259e2"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.496195 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hb82g\" (UniqueName: \"kubernetes.io/projected/f9c2de92-64ab-47c4-af56-0422c30259e2-kube-api-access-hb82g\") on node \"crc\" DevicePath \"\"" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.496252 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.496268 4669 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9c2de92-64ab-47c4-af56-0422c30259e2-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.801194 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" event={"ID":"f9c2de92-64ab-47c4-af56-0422c30259e2","Type":"ContainerDied","Data":"3b34d371fd8d54962717b3bd8dd4a48de7bbfa9967d818ac8e826105b9e3fb5d"} Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.801896 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b34d371fd8d54962717b3bd8dd4a48de7bbfa9967d818ac8e826105b9e3fb5d" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.801337 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-htq5r" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.897366 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk"] Dec 10 15:49:50 crc kubenswrapper[4669]: E1210 15:49:50.897925 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c2de92-64ab-47c4-af56-0422c30259e2" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.897954 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c2de92-64ab-47c4-af56-0422c30259e2" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.898263 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9c2de92-64ab-47c4-af56-0422c30259e2" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.899283 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.905545 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.906023 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.906160 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.906296 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 15:49:50 crc kubenswrapper[4669]: I1210 15:49:50.937236 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk"] Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.004936 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkcrn\" (UniqueName: \"kubernetes.io/projected/c65b4bdc-0d6f-4952-9945-6685f952fe6c-kube-api-access-jkcrn\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.005097 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.005192 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.106512 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.106639 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkcrn\" (UniqueName: \"kubernetes.io/projected/c65b4bdc-0d6f-4952-9945-6685f952fe6c-kube-api-access-jkcrn\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.106744 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk\" 
(UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.117210 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-ssh-key\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.120829 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-inventory\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.123655 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkcrn\" (UniqueName: \"kubernetes.io/projected/c65b4bdc-0d6f-4952-9945-6685f952fe6c-kube-api-access-jkcrn\") pod \"ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.234521 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.763760 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk"] Dec 10 15:49:51 crc kubenswrapper[4669]: I1210 15:49:51.813782 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" event={"ID":"c65b4bdc-0d6f-4952-9945-6685f952fe6c","Type":"ContainerStarted","Data":"2a0d1e7ede3d62a6783d61ba2233aab093f7c92fb4d1af5253d79e221b64db8c"} Dec 10 15:49:52 crc kubenswrapper[4669]: I1210 15:49:52.824777 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" event={"ID":"c65b4bdc-0d6f-4952-9945-6685f952fe6c","Type":"ContainerStarted","Data":"4202622b1cb0d64f043b82b7d356229af0ad2e1609ce4f6df8b3e8bd3c7b08c0"} Dec 10 15:49:52 crc kubenswrapper[4669]: I1210 15:49:52.851974 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" podStartSLOduration=2.318036408 podStartE2EDuration="2.851955772s" podCreationTimestamp="2025-12-10 15:49:50 +0000 UTC" firstStartedPulling="2025-12-10 15:49:51.764860201 +0000 UTC m=+1765.681806828" lastFinishedPulling="2025-12-10 15:49:52.298779565 +0000 UTC m=+1766.215726192" observedRunningTime="2025-12-10 15:49:52.84288109 +0000 UTC m=+1766.759827717" watchObservedRunningTime="2025-12-10 15:49:52.851955772 +0000 UTC m=+1766.768902399" Dec 10 15:49:56 crc kubenswrapper[4669]: I1210 15:49:56.856476 4669 generic.go:334] "Generic (PLEG): container finished" podID="c65b4bdc-0d6f-4952-9945-6685f952fe6c" containerID="4202622b1cb0d64f043b82b7d356229af0ad2e1609ce4f6df8b3e8bd3c7b08c0" exitCode=0 Dec 10 15:49:56 crc kubenswrapper[4669]: I1210 15:49:56.857755 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" event={"ID":"c65b4bdc-0d6f-4952-9945-6685f952fe6c","Type":"ContainerDied","Data":"4202622b1cb0d64f043b82b7d356229af0ad2e1609ce4f6df8b3e8bd3c7b08c0"} Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.270068 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.369578 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkcrn\" (UniqueName: \"kubernetes.io/projected/c65b4bdc-0d6f-4952-9945-6685f952fe6c-kube-api-access-jkcrn\") pod \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.369841 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-ssh-key\") pod \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.369894 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-inventory\") pod \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\" (UID: \"c65b4bdc-0d6f-4952-9945-6685f952fe6c\") " Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.381494 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c65b4bdc-0d6f-4952-9945-6685f952fe6c-kube-api-access-jkcrn" (OuterVolumeSpecName: "kube-api-access-jkcrn") pod "c65b4bdc-0d6f-4952-9945-6685f952fe6c" (UID: "c65b4bdc-0d6f-4952-9945-6685f952fe6c"). InnerVolumeSpecName "kube-api-access-jkcrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.407380 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c65b4bdc-0d6f-4952-9945-6685f952fe6c" (UID: "c65b4bdc-0d6f-4952-9945-6685f952fe6c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.413681 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-inventory" (OuterVolumeSpecName: "inventory") pod "c65b4bdc-0d6f-4952-9945-6685f952fe6c" (UID: "c65b4bdc-0d6f-4952-9945-6685f952fe6c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.471539 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkcrn\" (UniqueName: \"kubernetes.io/projected/c65b4bdc-0d6f-4952-9945-6685f952fe6c-kube-api-access-jkcrn\") on node \"crc\" DevicePath \"\"" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.471568 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.471578 4669 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c65b4bdc-0d6f-4952-9945-6685f952fe6c-inventory\") on node \"crc\" DevicePath \"\"" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.879037 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" event={"ID":"c65b4bdc-0d6f-4952-9945-6685f952fe6c","Type":"ContainerDied","Data":"2a0d1e7ede3d62a6783d61ba2233aab093f7c92fb4d1af5253d79e221b64db8c"} Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.879076 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2a0d1e7ede3d62a6783d61ba2233aab093f7c92fb4d1af5253d79e221b64db8c" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.879127 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.968918 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2"] Dec 10 15:49:58 crc kubenswrapper[4669]: E1210 15:49:58.969412 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c65b4bdc-0d6f-4952-9945-6685f952fe6c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.969442 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="c65b4bdc-0d6f-4952-9945-6685f952fe6c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.969699 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="c65b4bdc-0d6f-4952-9945-6685f952fe6c" containerName="ceph-hci-pre-edpm-deployment-openstack-edpm-ipam" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.970431 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.973358 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.973673 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.974005 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.974254 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 10 15:49:58 crc kubenswrapper[4669]: I1210 15:49:58.986021 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2"] Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.085433 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.085514 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bkjkb\" (UniqueName: \"kubernetes.io/projected/b543a9bb-281e-40da-bc8d-9a50df670090-kube-api-access-bkjkb\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.085581 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.187451 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bkjkb\" (UniqueName: \"kubernetes.io/projected/b543a9bb-281e-40da-bc8d-9a50df670090-kube-api-access-bkjkb\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.187973 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.188161 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2\" 
(UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.192251 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.193184 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.209061 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bkjkb\" (UniqueName: \"kubernetes.io/projected/b543a9bb-281e-40da-bc8d-9a50df670090-kube-api-access-bkjkb\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.291556 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.400576 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:49:59 crc kubenswrapper[4669]: E1210 15:49:59.400840 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.835452 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2"] Dec 10 15:49:59 crc kubenswrapper[4669]: I1210 15:49:59.887335 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" event={"ID":"b543a9bb-281e-40da-bc8d-9a50df670090","Type":"ContainerStarted","Data":"91f919c90fb6757679ecee7dc6c7fa32ea19521e51596fa25949733871088478"} Dec 10 15:50:00 crc kubenswrapper[4669]: I1210 15:50:00.901310 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" event={"ID":"b543a9bb-281e-40da-bc8d-9a50df670090","Type":"ContainerStarted","Data":"4b169218047c0e8d17dab055ff6c48adec74f5359c7de84ba5405237009c4c8e"} Dec 10 15:50:00 crc kubenswrapper[4669]: I1210 15:50:00.922476 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" podStartSLOduration=2.429212332 podStartE2EDuration="2.922456778s" podCreationTimestamp="2025-12-10 15:49:58 +0000 UTC" firstStartedPulling="2025-12-10 15:49:59.844959994 +0000 UTC 
m=+1773.761906621" lastFinishedPulling="2025-12-10 15:50:00.33820444 +0000 UTC m=+1774.255151067" observedRunningTime="2025-12-10 15:50:00.918631614 +0000 UTC m=+1774.835578251" watchObservedRunningTime="2025-12-10 15:50:00.922456778 +0000 UTC m=+1774.839403405" Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.063208 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-9gbtr"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.069950 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-gzt9w"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.077842 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-j4vkk"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.085198 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-1075-account-create-update-fw8l2"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.094017 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-1d59-account-create-update-zsxjn"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.102133 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-f2c5-account-create-update-kjc5w"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.109860 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-j4vkk"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.117328 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-gzt9w"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.124441 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-1075-account-create-update-fw8l2"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.130304 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-9gbtr"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.136382 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-1d59-account-create-update-zsxjn"] Dec 10 15:50:07 crc kubenswrapper[4669]: I1210 15:50:07.142245 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-f2c5-account-create-update-kjc5w"] Dec 10 15:50:08 crc kubenswrapper[4669]: I1210 15:50:08.431421 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="33b1a847-d820-4001-ad7e-226c098d4953" path="/var/lib/kubelet/pods/33b1a847-d820-4001-ad7e-226c098d4953/volumes" Dec 10 15:50:08 crc kubenswrapper[4669]: I1210 15:50:08.432365 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4a238de9-2038-4d87-a2c7-c3646e713865" path="/var/lib/kubelet/pods/4a238de9-2038-4d87-a2c7-c3646e713865/volumes" Dec 10 15:50:08 crc kubenswrapper[4669]: I1210 15:50:08.433420 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6026f9fc-7f7c-45cd-b88e-3eb1735014b4" path="/var/lib/kubelet/pods/6026f9fc-7f7c-45cd-b88e-3eb1735014b4/volumes" Dec 10 15:50:08 crc kubenswrapper[4669]: I1210 15:50:08.434120 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab" path="/var/lib/kubelet/pods/b3b701b7-73e0-4f8e-bc46-9c0b6cd9f9ab/volumes" Dec 10 15:50:08 crc kubenswrapper[4669]: I1210 15:50:08.434929 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4467f81-efe4-46f1-9bce-40afbc34252b" path="/var/lib/kubelet/pods/d4467f81-efe4-46f1-9bce-40afbc34252b/volumes" Dec 
10 15:50:08 crc kubenswrapper[4669]: I1210 15:50:08.435817 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d72e907f-0b02-41bf-a8b0-d28a2b7856aa" path="/var/lib/kubelet/pods/d72e907f-0b02-41bf-a8b0-d28a2b7856aa/volumes"
Dec 10 15:50:13 crc kubenswrapper[4669]: I1210 15:50:13.397714 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:50:13 crc kubenswrapper[4669]: E1210 15:50:13.398563 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:50:28 crc kubenswrapper[4669]: I1210 15:50:28.398574 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:50:28 crc kubenswrapper[4669]: E1210 15:50:28.399415 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:50:31 crc kubenswrapper[4669]: I1210 15:50:31.748941 4669 scope.go:117] "RemoveContainer" containerID="9e77d8aa1ddbf9803b85bd3bd0a12384ea3fbc59111ccd8c7a0bff35637b0ef1"
Dec 10 15:50:31 crc kubenswrapper[4669]: I1210 15:50:31.780784 4669 scope.go:117] "RemoveContainer" containerID="6422c87b8c518be5ee4af3a204d158dffc5d8d9d0311aaae38e275a3311aedd0"
Dec 10 15:50:31 crc kubenswrapper[4669]: I1210 15:50:31.827414 4669 scope.go:117] "RemoveContainer" containerID="f3ab1cbd5ffa9f90f9348b940a66444bad5cf37189854b2da2ef6615211539fb"
Dec 10 15:50:31 crc kubenswrapper[4669]: I1210 15:50:31.866118 4669 scope.go:117] "RemoveContainer" containerID="47fd761d0633d50b27190e91a9ba7763bb582cae823d088556b911575cf24659"
Dec 10 15:50:31 crc kubenswrapper[4669]: I1210 15:50:31.913037 4669 scope.go:117] "RemoveContainer" containerID="a4f8b80be3d31881ec8721a84b6c5dcb979941e06be441aa24e0ba0f72e16d61"
Dec 10 15:50:31 crc kubenswrapper[4669]: I1210 15:50:31.933184 4669 scope.go:117] "RemoveContainer" containerID="454788ad57eef6fe87288ed5b332d6773f1557e5690635f347abb5460bb89b4f"
Dec 10 15:50:35 crc kubenswrapper[4669]: I1210 15:50:35.040532 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wvfpg"]
Dec 10 15:50:35 crc kubenswrapper[4669]: I1210 15:50:35.048296 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-wvfpg"]
Dec 10 15:50:36 crc kubenswrapper[4669]: I1210 15:50:36.409159 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35" path="/var/lib/kubelet/pods/2fc15c9c-f4fa-44fc-b8f8-19e4e2d6fc35/volumes"
Dec 10 15:50:43 crc kubenswrapper[4669]: I1210 15:50:43.398025 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:50:43 crc kubenswrapper[4669]: E1210 15:50:43.398843 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:50:54 crc kubenswrapper[4669]: I1210 15:50:54.398550 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:50:54 crc kubenswrapper[4669]: E1210 15:50:54.399555 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:50:55 crc kubenswrapper[4669]: I1210 15:50:55.444411 4669 generic.go:334] "Generic (PLEG): container finished" podID="b543a9bb-281e-40da-bc8d-9a50df670090" containerID="4b169218047c0e8d17dab055ff6c48adec74f5359c7de84ba5405237009c4c8e" exitCode=0
Dec 10 15:50:55 crc kubenswrapper[4669]: I1210 15:50:55.444764 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" event={"ID":"b543a9bb-281e-40da-bc8d-9a50df670090","Type":"ContainerDied","Data":"4b169218047c0e8d17dab055ff6c48adec74f5359c7de84ba5405237009c4c8e"}
Dec 10 15:50:56 crc kubenswrapper[4669]: I1210 15:50:56.906932 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.090086 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-inventory\") pod \"b543a9bb-281e-40da-bc8d-9a50df670090\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") "
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.090188 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-ssh-key\") pod \"b543a9bb-281e-40da-bc8d-9a50df670090\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") "
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.090344 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bkjkb\" (UniqueName: \"kubernetes.io/projected/b543a9bb-281e-40da-bc8d-9a50df670090-kube-api-access-bkjkb\") pod \"b543a9bb-281e-40da-bc8d-9a50df670090\" (UID: \"b543a9bb-281e-40da-bc8d-9a50df670090\") "
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.102777 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b543a9bb-281e-40da-bc8d-9a50df670090-kube-api-access-bkjkb" (OuterVolumeSpecName: "kube-api-access-bkjkb") pod "b543a9bb-281e-40da-bc8d-9a50df670090" (UID: "b543a9bb-281e-40da-bc8d-9a50df670090"). InnerVolumeSpecName "kube-api-access-bkjkb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.127345 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b543a9bb-281e-40da-bc8d-9a50df670090" (UID: "b543a9bb-281e-40da-bc8d-9a50df670090"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.128157 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-inventory" (OuterVolumeSpecName: "inventory") pod "b543a9bb-281e-40da-bc8d-9a50df670090" (UID: "b543a9bb-281e-40da-bc8d-9a50df670090"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.192956 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bkjkb\" (UniqueName: \"kubernetes.io/projected/b543a9bb-281e-40da-bc8d-9a50df670090-kube-api-access-bkjkb\") on node \"crc\" DevicePath \"\""
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.192992 4669 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-inventory\") on node \"crc\" DevicePath \"\""
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.193000 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b543a9bb-281e-40da-bc8d-9a50df670090-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.461387 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2" event={"ID":"b543a9bb-281e-40da-bc8d-9a50df670090","Type":"ContainerDied","Data":"91f919c90fb6757679ecee7dc6c7fa32ea19521e51596fa25949733871088478"}
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.461424 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="91f919c90fb6757679ecee7dc6c7fa32ea19521e51596fa25949733871088478"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.461435 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.572929 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-vmdrd"]
Dec 10 15:50:57 crc kubenswrapper[4669]: E1210 15:50:57.573487 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b543a9bb-281e-40da-bc8d-9a50df670090" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.573513 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="b543a9bb-281e-40da-bc8d-9a50df670090" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.574958 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="b543a9bb-281e-40da-bc8d-9a50df670090" containerName="configure-os-edpm-deployment-openstack-edpm-ipam"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.576251 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.582439 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.582465 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.582516 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.582703 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.598273 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-vmdrd"]
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.703016 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-vmdrd\" (UID: \"4617562a-4946-4591-b331-853992039296\") " pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.703131 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-vmdrd\" (UID: \"4617562a-4946-4591-b331-853992039296\") " pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.703709 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66t2c\" (UniqueName: \"kubernetes.io/projected/4617562a-4946-4591-b331-853992039296-kube-api-access-66t2c\") pod \"ssh-known-hosts-edpm-deployment-vmdrd\" (UID: \"4617562a-4946-4591-b331-853992039296\") " pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.805514 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-vmdrd\" (UID: \"4617562a-4946-4591-b331-853992039296\") " pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.805647 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-vmdrd\" (UID: \"4617562a-4946-4591-b331-853992039296\") " pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.805759 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66t2c\" (UniqueName: \"kubernetes.io/projected/4617562a-4946-4591-b331-853992039296-kube-api-access-66t2c\") pod \"ssh-known-hosts-edpm-deployment-vmdrd\" (UID: \"4617562a-4946-4591-b331-853992039296\") " pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.810452 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-vmdrd\" (UID: \"4617562a-4946-4591-b331-853992039296\") " pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.819644 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-vmdrd\" (UID: \"4617562a-4946-4591-b331-853992039296\") " pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.829034 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66t2c\" (UniqueName: \"kubernetes.io/projected/4617562a-4946-4591-b331-853992039296-kube-api-access-66t2c\") pod \"ssh-known-hosts-edpm-deployment-vmdrd\" (UID: \"4617562a-4946-4591-b331-853992039296\") " pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:57 crc kubenswrapper[4669]: I1210 15:50:57.900018 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:50:58 crc kubenswrapper[4669]: I1210 15:50:58.063677 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-szq9q"]
Dec 10 15:50:58 crc kubenswrapper[4669]: I1210 15:50:58.075982 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-szq9q"]
Dec 10 15:50:58 crc kubenswrapper[4669]: I1210 15:50:58.413081 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d87be1f9-462a-4c6e-b252-11f57a2efe0f" path="/var/lib/kubelet/pods/d87be1f9-462a-4c6e-b252-11f57a2efe0f/volumes"
Dec 10 15:50:58 crc kubenswrapper[4669]: I1210 15:50:58.445467 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-vmdrd"]
Dec 10 15:50:58 crc kubenswrapper[4669]: I1210 15:50:58.473903 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd" event={"ID":"4617562a-4946-4591-b331-853992039296","Type":"ContainerStarted","Data":"0fd08f05a3f284482c8535efe1ca8500cd3e5c47587a7c9c0cd6ed8bc4587441"}
Dec 10 15:50:59 crc kubenswrapper[4669]: I1210 15:50:59.044509 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6hngc"]
Dec 10 15:50:59 crc kubenswrapper[4669]: I1210 15:50:59.052296 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-6hngc"]
Dec 10 15:50:59 crc kubenswrapper[4669]: I1210 15:50:59.482164 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd" event={"ID":"4617562a-4946-4591-b331-853992039296","Type":"ContainerStarted","Data":"61dba3f08857a39fc057fb57f98edb5e954cf7fbff32bfe765f9fc026811240c"}
Dec 10 15:51:00 crc kubenswrapper[4669]: I1210 15:51:00.412784 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2bdf613-d862-4e15-a915-32b1789bc868" path="/var/lib/kubelet/pods/a2bdf613-d862-4e15-a915-32b1789bc868/volumes"
Dec 10 15:51:06 crc kubenswrapper[4669]: I1210 15:51:06.403485 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:51:06 crc kubenswrapper[4669]: E1210 15:51:06.404093 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:51:06 crc kubenswrapper[4669]: I1210 15:51:06.565362 4669 generic.go:334] "Generic (PLEG): container finished" podID="4617562a-4946-4591-b331-853992039296" containerID="61dba3f08857a39fc057fb57f98edb5e954cf7fbff32bfe765f9fc026811240c" exitCode=0
Dec 10 15:51:06 crc kubenswrapper[4669]: I1210 15:51:06.565489 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd" event={"ID":"4617562a-4946-4591-b331-853992039296","Type":"ContainerDied","Data":"61dba3f08857a39fc057fb57f98edb5e954cf7fbff32bfe765f9fc026811240c"}
Dec 10 15:51:07 crc kubenswrapper[4669]: I1210 15:51:07.986933 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.111045 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66t2c\" (UniqueName: \"kubernetes.io/projected/4617562a-4946-4591-b331-853992039296-kube-api-access-66t2c\") pod \"4617562a-4946-4591-b331-853992039296\" (UID: \"4617562a-4946-4591-b331-853992039296\") "
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.111158 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-inventory-0\") pod \"4617562a-4946-4591-b331-853992039296\" (UID: \"4617562a-4946-4591-b331-853992039296\") "
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.111313 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-ssh-key-openstack-edpm-ipam\") pod \"4617562a-4946-4591-b331-853992039296\" (UID: \"4617562a-4946-4591-b331-853992039296\") "
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.128466 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4617562a-4946-4591-b331-853992039296-kube-api-access-66t2c" (OuterVolumeSpecName: "kube-api-access-66t2c") pod "4617562a-4946-4591-b331-853992039296" (UID: "4617562a-4946-4591-b331-853992039296"). InnerVolumeSpecName "kube-api-access-66t2c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.137406 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "4617562a-4946-4591-b331-853992039296" (UID: "4617562a-4946-4591-b331-853992039296"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.138762 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "4617562a-4946-4591-b331-853992039296" (UID: "4617562a-4946-4591-b331-853992039296"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.213363 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.213404 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66t2c\" (UniqueName: \"kubernetes.io/projected/4617562a-4946-4591-b331-853992039296-kube-api-access-66t2c\") on node \"crc\" DevicePath \"\""
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.213417 4669 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4617562a-4946-4591-b331-853992039296-inventory-0\") on node \"crc\" DevicePath \"\""
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.583636 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd" event={"ID":"4617562a-4946-4591-b331-853992039296","Type":"ContainerDied","Data":"0fd08f05a3f284482c8535efe1ca8500cd3e5c47587a7c9c0cd6ed8bc4587441"}
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.583678 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0fd08f05a3f284482c8535efe1ca8500cd3e5c47587a7c9c0cd6ed8bc4587441"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.584014 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-vmdrd"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.670854 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"]
Dec 10 15:51:08 crc kubenswrapper[4669]: E1210 15:51:08.671242 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4617562a-4946-4591-b331-853992039296" containerName="ssh-known-hosts-edpm-deployment"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.671259 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="4617562a-4946-4591-b331-853992039296" containerName="ssh-known-hosts-edpm-deployment"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.671432 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="4617562a-4946-4591-b331-853992039296" containerName="ssh-known-hosts-edpm-deployment"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.671981 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.674022 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.674348 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.674395 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.674951 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.689102 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"]
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.823676 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ch2tx\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.823744 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5l2h\" (UniqueName: \"kubernetes.io/projected/ced1fc19-0076-4a98-82fa-a93c33c4a43a-kube-api-access-w5l2h\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ch2tx\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.823835 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ch2tx\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.925126 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ch2tx\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.925514 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5l2h\" (UniqueName: \"kubernetes.io/projected/ced1fc19-0076-4a98-82fa-a93c33c4a43a-kube-api-access-w5l2h\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ch2tx\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.925586 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ch2tx\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.929522 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ch2tx\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.933876 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ch2tx\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.943710 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w5l2h\" (UniqueName: \"kubernetes.io/projected/ced1fc19-0076-4a98-82fa-a93c33c4a43a-kube-api-access-w5l2h\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-ch2tx\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:08 crc kubenswrapper[4669]: I1210 15:51:08.986370 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:09 crc kubenswrapper[4669]: I1210 15:51:09.588834 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"]
Dec 10 15:51:10 crc kubenswrapper[4669]: I1210 15:51:10.605507 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx" event={"ID":"ced1fc19-0076-4a98-82fa-a93c33c4a43a","Type":"ContainerStarted","Data":"e52762a87ffff0612cd5349f771a207fe54506529247a8e0e80d5e3bc7b1318e"}
Dec 10 15:51:12 crc kubenswrapper[4669]: I1210 15:51:12.623064 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx" event={"ID":"ced1fc19-0076-4a98-82fa-a93c33c4a43a","Type":"ContainerStarted","Data":"7c847911c4d3d9cebf7d8f7fc9ba04c2f47ef51ed88efadcb798370da254e6be"}
Dec 10 15:51:12 crc kubenswrapper[4669]: I1210 15:51:12.640330 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx" podStartSLOduration=2.289274437 podStartE2EDuration="4.640307665s" podCreationTimestamp="2025-12-10 15:51:08 +0000 UTC" firstStartedPulling="2025-12-10 15:51:09.616934059 +0000 UTC m=+1843.533880686" lastFinishedPulling="2025-12-10 15:51:11.967967287 +0000 UTC m=+1845.884913914" observedRunningTime="2025-12-10 15:51:12.637382984 +0000 UTC m=+1846.554329601" watchObservedRunningTime="2025-12-10 15:51:12.640307665 +0000 UTC m=+1846.557254292"
Dec 10 15:51:19 crc kubenswrapper[4669]: I1210 15:51:19.397680 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:51:19 crc kubenswrapper[4669]: E1210 15:51:19.398767 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:51:20 crc kubenswrapper[4669]: I1210 15:51:20.685023 4669 generic.go:334] "Generic (PLEG): container finished" podID="ced1fc19-0076-4a98-82fa-a93c33c4a43a" containerID="7c847911c4d3d9cebf7d8f7fc9ba04c2f47ef51ed88efadcb798370da254e6be" exitCode=0
Dec 10 15:51:20 crc kubenswrapper[4669]: I1210 15:51:20.685093 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx" event={"ID":"ced1fc19-0076-4a98-82fa-a93c33c4a43a","Type":"ContainerDied","Data":"7c847911c4d3d9cebf7d8f7fc9ba04c2f47ef51ed88efadcb798370da254e6be"}
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.086414 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.201997 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5l2h\" (UniqueName: \"kubernetes.io/projected/ced1fc19-0076-4a98-82fa-a93c33c4a43a-kube-api-access-w5l2h\") pod \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") "
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.202078 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-ssh-key\") pod \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") "
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.202120 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-inventory\") pod \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\" (UID: \"ced1fc19-0076-4a98-82fa-a93c33c4a43a\") "
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.209571 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ced1fc19-0076-4a98-82fa-a93c33c4a43a-kube-api-access-w5l2h" (OuterVolumeSpecName: "kube-api-access-w5l2h") pod "ced1fc19-0076-4a98-82fa-a93c33c4a43a" (UID: "ced1fc19-0076-4a98-82fa-a93c33c4a43a"). InnerVolumeSpecName "kube-api-access-w5l2h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.233037 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ced1fc19-0076-4a98-82fa-a93c33c4a43a" (UID: "ced1fc19-0076-4a98-82fa-a93c33c4a43a"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.233457 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-inventory" (OuterVolumeSpecName: "inventory") pod "ced1fc19-0076-4a98-82fa-a93c33c4a43a" (UID: "ced1fc19-0076-4a98-82fa-a93c33c4a43a"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.304679 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.304704 4669 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ced1fc19-0076-4a98-82fa-a93c33c4a43a-inventory\") on node \"crc\" DevicePath \"\""
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.304714 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5l2h\" (UniqueName: \"kubernetes.io/projected/ced1fc19-0076-4a98-82fa-a93c33c4a43a-kube-api-access-w5l2h\") on node \"crc\" DevicePath \"\""
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.701936 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx" event={"ID":"ced1fc19-0076-4a98-82fa-a93c33c4a43a","Type":"ContainerDied","Data":"e52762a87ffff0612cd5349f771a207fe54506529247a8e0e80d5e3bc7b1318e"}
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.701981 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e52762a87ffff0612cd5349f771a207fe54506529247a8e0e80d5e3bc7b1318e"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.702021 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-ch2tx"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.804174 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"]
Dec 10 15:51:22 crc kubenswrapper[4669]: E1210 15:51:22.804832 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ced1fc19-0076-4a98-82fa-a93c33c4a43a" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.805011 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="ced1fc19-0076-4a98-82fa-a93c33c4a43a" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.805264 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="ced1fc19-0076-4a98-82fa-a93c33c4a43a" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.805893 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.815476 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.815688 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.815884 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rl7xp"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.816050 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.823400 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"]
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.918990 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.919157 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:22 crc kubenswrapper[4669]: I1210 15:51:22.919364 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd42f\" (UniqueName: \"kubernetes.io/projected/a7f38325-ba76-4bb5-85c2-b0fc44213a71-kube-api-access-dd42f\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.021073 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.021167 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd42f\" (UniqueName: \"kubernetes.io/projected/a7f38325-ba76-4bb5-85c2-b0fc44213a71-kube-api-access-dd42f\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.021248 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.024872 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.024896 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.038184 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd42f\" (UniqueName: \"kubernetes.io/projected/a7f38325-ba76-4bb5-85c2-b0fc44213a71-kube-api-access-dd42f\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.133875 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.658342 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"]
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.666692 4669 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 10 15:51:23 crc kubenswrapper[4669]: I1210 15:51:23.712483 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk" event={"ID":"a7f38325-ba76-4bb5-85c2-b0fc44213a71","Type":"ContainerStarted","Data":"22a3470e469dc3152453ae26877be7b4208f1203dbe906446a1b8a5c9761f5b2"}
Dec 10 15:51:28 crc kubenswrapper[4669]: I1210 15:51:28.790700 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 10 15:51:29 crc kubenswrapper[4669]: I1210 15:51:29.770915 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk" event={"ID":"a7f38325-ba76-4bb5-85c2-b0fc44213a71","Type":"ContainerStarted","Data":"daadd32f516c7ed0f64c8baf8be1a76e3fdd0070440ef2d660b1a7cb80861c95"}
Dec 10 15:51:29 crc kubenswrapper[4669]: I1210 15:51:29.791378 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk" podStartSLOduration=2.669772774 podStartE2EDuration="7.79136394s" podCreationTimestamp="2025-12-10 15:51:22 +0000 UTC" firstStartedPulling="2025-12-10 15:51:23.666456578 +0000 UTC m=+1857.583403205" lastFinishedPulling="2025-12-10 15:51:28.788047704 +0000 UTC m=+1862.704994371" observedRunningTime="2025-12-10 15:51:29.789682468 +0000 UTC m=+1863.706629095" watchObservedRunningTime="2025-12-10 15:51:29.79136394 +0000 UTC m=+1863.708310567"
Dec 10 15:51:32 crc kubenswrapper[4669]: I1210 15:51:32.064039 4669 scope.go:117] "RemoveContainer" containerID="ec8a44b8eb017eaceb9c4f296e7f2b645a986f540fefda606a917f5a35a81199"
Dec 10 15:51:32 crc kubenswrapper[4669]: I1210 15:51:32.133142 4669 scope.go:117] "RemoveContainer" containerID="1d8f70c1aba1cfbeb11b1d0af575830e40902737de62f9d8598ad9901254cf47"
Dec 10 15:51:32 crc kubenswrapper[4669]: I1210 15:51:32.202698 4669 scope.go:117] "RemoveContainer" containerID="27b23a0805fdeb0f1bb404825704f44ea46821aa6e595ab90994396db123b2d7"
Dec 10 15:51:34 crc kubenswrapper[4669]: I1210 15:51:34.398451 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:51:34 crc kubenswrapper[4669]: E1210 15:51:34.399133 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:51:38 crc kubenswrapper[4669]: I1210 15:51:38.855953 4669 generic.go:334] "Generic (PLEG): container finished" podID="a7f38325-ba76-4bb5-85c2-b0fc44213a71" containerID="daadd32f516c7ed0f64c8baf8be1a76e3fdd0070440ef2d660b1a7cb80861c95" exitCode=0
Dec 10 15:51:38 crc kubenswrapper[4669]: I1210 15:51:38.856465 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk" event={"ID":"a7f38325-ba76-4bb5-85c2-b0fc44213a71","Type":"ContainerDied","Data":"daadd32f516c7ed0f64c8baf8be1a76e3fdd0070440ef2d660b1a7cb80861c95"}
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.294668 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.367060 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-ssh-key\") pod \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") "
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.367119 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-inventory\") pod \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") "
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.367174 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dd42f\" (UniqueName: \"kubernetes.io/projected/a7f38325-ba76-4bb5-85c2-b0fc44213a71-kube-api-access-dd42f\") pod \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\" (UID: \"a7f38325-ba76-4bb5-85c2-b0fc44213a71\") "
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.375997 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7f38325-ba76-4bb5-85c2-b0fc44213a71-kube-api-access-dd42f" (OuterVolumeSpecName: "kube-api-access-dd42f") pod "a7f38325-ba76-4bb5-85c2-b0fc44213a71" (UID: "a7f38325-ba76-4bb5-85c2-b0fc44213a71"). InnerVolumeSpecName "kube-api-access-dd42f". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.395121 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a7f38325-ba76-4bb5-85c2-b0fc44213a71" (UID: "a7f38325-ba76-4bb5-85c2-b0fc44213a71"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.403271 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-inventory" (OuterVolumeSpecName: "inventory") pod "a7f38325-ba76-4bb5-85c2-b0fc44213a71" (UID: "a7f38325-ba76-4bb5-85c2-b0fc44213a71"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.469919 4669 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.470212 4669 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a7f38325-ba76-4bb5-85c2-b0fc44213a71-inventory\") on node \"crc\" DevicePath \"\""
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.470306 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dd42f\" (UniqueName: \"kubernetes.io/projected/a7f38325-ba76-4bb5-85c2-b0fc44213a71-kube-api-access-dd42f\") on node \"crc\" DevicePath \"\""
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.875448 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk" event={"ID":"a7f38325-ba76-4bb5-85c2-b0fc44213a71","Type":"ContainerDied","Data":"22a3470e469dc3152453ae26877be7b4208f1203dbe906446a1b8a5c9761f5b2"}
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.875728 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22a3470e469dc3152453ae26877be7b4208f1203dbe906446a1b8a5c9761f5b2"
Dec 10 15:51:40 crc kubenswrapper[4669]: I1210 15:51:40.875511 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk"
Dec 10 15:51:43 crc kubenswrapper[4669]: I1210 15:51:43.049720 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-dszds"]
Dec 10 15:51:43 crc kubenswrapper[4669]: I1210 15:51:43.057156 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-dszds"]
Dec 10 15:51:44 crc kubenswrapper[4669]: I1210 15:51:44.412827 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="873dc098-d2be-4293-8179-167941e30e1e" path="/var/lib/kubelet/pods/873dc098-d2be-4293-8179-167941e30e1e/volumes"
Dec 10 15:51:49 crc kubenswrapper[4669]: I1210 15:51:49.398934 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:51:49 crc kubenswrapper[4669]: E1210 15:51:49.401544 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:52:01 crc kubenswrapper[4669]: I1210 15:52:01.398049 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:52:01 crc kubenswrapper[4669]: E1210 15:52:01.398726 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:52:12 crc kubenswrapper[4669]: I1210 15:52:12.398399 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:52:12 crc kubenswrapper[4669]: E1210 15:52:12.399031 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:52:27 crc kubenswrapper[4669]: I1210 15:52:27.399156 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:52:27 crc kubenswrapper[4669]: E1210 15:52:27.400341 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:52:32 crc kubenswrapper[4669]: I1210 15:52:32.299739 4669 scope.go:117] "RemoveContainer" containerID="445fefb3ebf011927ebd7c50db2fd00f3e4999caf3058e5099800a3180eafb12"
Dec 10 15:52:40 crc kubenswrapper[4669]: I1210 15:52:40.398045 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:52:40 crc kubenswrapper[4669]: E1210 15:52:40.400472 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:52:51 crc kubenswrapper[4669]: I1210 15:52:51.398852 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:52:51 crc kubenswrapper[4669]: E1210 15:52:51.399800 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4"
Dec 10 15:53:03 crc kubenswrapper[4669]: I1210 15:53:03.398791 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591"
Dec 10 15:53:03 crc kubenswrapper[4669]: I1210 15:53:03.785266 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"a5a1989b7116ed9e8655569d3bb8c3b49ea98622b68772ed51b4984f5fad6f0e"}
Dec 10 15:53:33 crc kubenswrapper[4669]: I1210 15:53:33.893933 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kr84h"]
Dec 10 15:53:33 crc kubenswrapper[4669]: E1210 15:53:33.904268 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7f38325-ba76-4bb5-85c2-b0fc44213a71" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 10 15:53:33 crc kubenswrapper[4669]: I1210 15:53:33.904315 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7f38325-ba76-4bb5-85c2-b0fc44213a71" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 10 15:53:33 crc kubenswrapper[4669]: I1210 15:53:33.905026 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7f38325-ba76-4bb5-85c2-b0fc44213a71" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 10 15:53:33 crc kubenswrapper[4669]: I1210 15:53:33.921878 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:33 crc kubenswrapper[4669]: I1210 15:53:33.929352 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kr84h"]
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.057361 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fl2hk\" (UniqueName: \"kubernetes.io/projected/f1abb4ff-31b5-46e6-b036-8a998009eaeb-kube-api-access-fl2hk\") pod \"redhat-operators-kr84h\" (UID: \"f1abb4ff-31b5-46e6-b036-8a998009eaeb\") " pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.057440 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1abb4ff-31b5-46e6-b036-8a998009eaeb-catalog-content\") pod \"redhat-operators-kr84h\" (UID: \"f1abb4ff-31b5-46e6-b036-8a998009eaeb\") " pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.057628 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1abb4ff-31b5-46e6-b036-8a998009eaeb-utilities\") pod \"redhat-operators-kr84h\" (UID: \"f1abb4ff-31b5-46e6-b036-8a998009eaeb\") " pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.159290 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1abb4ff-31b5-46e6-b036-8a998009eaeb-catalog-content\") pod \"redhat-operators-kr84h\" (UID: \"f1abb4ff-31b5-46e6-b036-8a998009eaeb\") " pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.159363 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1abb4ff-31b5-46e6-b036-8a998009eaeb-utilities\") pod \"redhat-operators-kr84h\" (UID: \"f1abb4ff-31b5-46e6-b036-8a998009eaeb\") " pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.159454 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fl2hk\" (UniqueName: \"kubernetes.io/projected/f1abb4ff-31b5-46e6-b036-8a998009eaeb-kube-api-access-fl2hk\") pod \"redhat-operators-kr84h\" (UID: \"f1abb4ff-31b5-46e6-b036-8a998009eaeb\") " pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.160154 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1abb4ff-31b5-46e6-b036-8a998009eaeb-catalog-content\") pod \"redhat-operators-kr84h\" (UID: \"f1abb4ff-31b5-46e6-b036-8a998009eaeb\") " pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.160436 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1abb4ff-31b5-46e6-b036-8a998009eaeb-utilities\") pod \"redhat-operators-kr84h\" (UID: \"f1abb4ff-31b5-46e6-b036-8a998009eaeb\") " pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.190888 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fl2hk\" (UniqueName: \"kubernetes.io/projected/f1abb4ff-31b5-46e6-b036-8a998009eaeb-kube-api-access-fl2hk\") pod \"redhat-operators-kr84h\" (UID: \"f1abb4ff-31b5-46e6-b036-8a998009eaeb\") " pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.274655 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:34 crc kubenswrapper[4669]: W1210 15:53:34.742394 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1abb4ff_31b5_46e6_b036_8a998009eaeb.slice/crio-35711c3fd605580a7bfa2d88e22518fd687fb0951a43b9e658bd29b844e11d11 WatchSource:0}: Error finding container 35711c3fd605580a7bfa2d88e22518fd687fb0951a43b9e658bd29b844e11d11: Status 404 returned error can't find the container with id 35711c3fd605580a7bfa2d88e22518fd687fb0951a43b9e658bd29b844e11d11
Dec 10 15:53:34 crc kubenswrapper[4669]: I1210 15:53:34.744025 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kr84h"]
Dec 10 15:53:35 crc kubenswrapper[4669]: I1210 15:53:35.059136 4669 generic.go:334] "Generic (PLEG): container finished" podID="f1abb4ff-31b5-46e6-b036-8a998009eaeb" containerID="767ec5d920415b37091f5961efab7398ef0ed0a494867d508d06663841f20380" exitCode=0
Dec 10 15:53:35 crc kubenswrapper[4669]: I1210 15:53:35.059176 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr84h" event={"ID":"f1abb4ff-31b5-46e6-b036-8a998009eaeb","Type":"ContainerDied","Data":"767ec5d920415b37091f5961efab7398ef0ed0a494867d508d06663841f20380"}
Dec 10 15:53:35 crc kubenswrapper[4669]: I1210 15:53:35.059201 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr84h" event={"ID":"f1abb4ff-31b5-46e6-b036-8a998009eaeb","Type":"ContainerStarted","Data":"35711c3fd605580a7bfa2d88e22518fd687fb0951a43b9e658bd29b844e11d11"}
Dec 10 15:53:46 crc kubenswrapper[4669]: I1210 15:53:46.166176 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr84h" event={"ID":"f1abb4ff-31b5-46e6-b036-8a998009eaeb","Type":"ContainerStarted","Data":"57c1ac344c506531a0b06ce3eb796a98d1176905178092a31393ed62bbc0dc5f"}
Dec 10 15:53:47 crc kubenswrapper[4669]: I1210 15:53:47.175316 4669 generic.go:334] "Generic (PLEG): container finished" podID="f1abb4ff-31b5-46e6-b036-8a998009eaeb" containerID="57c1ac344c506531a0b06ce3eb796a98d1176905178092a31393ed62bbc0dc5f" exitCode=0
Dec 10 15:53:47 crc kubenswrapper[4669]: I1210 15:53:47.175395 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr84h" event={"ID":"f1abb4ff-31b5-46e6-b036-8a998009eaeb","Type":"ContainerDied","Data":"57c1ac344c506531a0b06ce3eb796a98d1176905178092a31393ed62bbc0dc5f"}
Dec 10 15:53:47 crc kubenswrapper[4669]: E1210 15:53:47.276719 4669 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1abb4ff_31b5_46e6_b036_8a998009eaeb.slice/crio-conmon-57c1ac344c506531a0b06ce3eb796a98d1176905178092a31393ed62bbc0dc5f.scope\": RecentStats: unable to find data in memory cache]"
Dec 10 15:53:49 crc kubenswrapper[4669]: I1210 15:53:49.192738 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kr84h" event={"ID":"f1abb4ff-31b5-46e6-b036-8a998009eaeb","Type":"ContainerStarted","Data":"7f258bb0aea947b8085aee76ed3fa544c19c6f4297aa962d77c44a7d289622c6"}
Dec 10 15:53:49 crc kubenswrapper[4669]: I1210 15:53:49.215659 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kr84h" podStartSLOduration=2.897658259 podStartE2EDuration="16.215636767s" podCreationTimestamp="2025-12-10 15:53:33 +0000 UTC" firstStartedPulling="2025-12-10 15:53:35.061124304 +0000 UTC m=+1988.978070931" lastFinishedPulling="2025-12-10 15:53:48.379102822 +0000 UTC m=+2002.296049439" observedRunningTime="2025-12-10 15:53:49.211771013 +0000 UTC m=+2003.128717660" watchObservedRunningTime="2025-12-10 15:53:49.215636767 +0000 UTC m=+2003.132583394"
Dec 10 15:53:54 crc kubenswrapper[4669]: I1210 15:53:54.275769 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:54 crc kubenswrapper[4669]: I1210 15:53:54.276305 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:53:55 crc kubenswrapper[4669]: I1210 15:53:55.326344 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-kr84h" podUID="f1abb4ff-31b5-46e6-b036-8a998009eaeb" containerName="registry-server" probeResult="failure" output=<
Dec 10 15:53:55 crc kubenswrapper[4669]: timeout: failed to connect service ":50051" within 1s
Dec 10 15:53:55 crc kubenswrapper[4669]: >
Dec 10 15:54:04 crc kubenswrapper[4669]: I1210 15:54:04.316605 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:54:04 crc kubenswrapper[4669]: I1210 15:54:04.363520 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kr84h"
Dec 10 15:54:04 crc kubenswrapper[4669]: I1210 15:54:04.921592 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kr84h"]
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.098826 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xtmp8"]
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.099128 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-xtmp8" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerName="registry-server" containerID="cri-o://d0666cf720f057418a521dee51d0427df03f5938b4ee4e7fa33bf0f8555fb69b" gracePeriod=2
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.404055 4669 generic.go:334] "Generic (PLEG): container finished" podID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerID="d0666cf720f057418a521dee51d0427df03f5938b4ee4e7fa33bf0f8555fb69b" exitCode=0
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.405029 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtmp8" event={"ID":"35fc6428-e432-43a3-9ba3-09e64c8c3c65","Type":"ContainerDied","Data":"d0666cf720f057418a521dee51d0427df03f5938b4ee4e7fa33bf0f8555fb69b"}
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.728665 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtmp8"
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.750543 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg7kt\" (UniqueName: \"kubernetes.io/projected/35fc6428-e432-43a3-9ba3-09e64c8c3c65-kube-api-access-pg7kt\") pod \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") "
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.750596 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-utilities\") pod \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") "
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.750670 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-catalog-content\") pod \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\" (UID: \"35fc6428-e432-43a3-9ba3-09e64c8c3c65\") "
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.751579 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-utilities" (OuterVolumeSpecName: "utilities") pod "35fc6428-e432-43a3-9ba3-09e64c8c3c65" (UID: "35fc6428-e432-43a3-9ba3-09e64c8c3c65"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.760132 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35fc6428-e432-43a3-9ba3-09e64c8c3c65-kube-api-access-pg7kt" (OuterVolumeSpecName: "kube-api-access-pg7kt") pod "35fc6428-e432-43a3-9ba3-09e64c8c3c65" (UID: "35fc6428-e432-43a3-9ba3-09e64c8c3c65"). InnerVolumeSpecName "kube-api-access-pg7kt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.851925 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg7kt\" (UniqueName: \"kubernetes.io/projected/35fc6428-e432-43a3-9ba3-09e64c8c3c65-kube-api-access-pg7kt\") on node \"crc\" DevicePath \"\""
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.851968 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-utilities\") on node \"crc\" DevicePath \"\""
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.862988 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35fc6428-e432-43a3-9ba3-09e64c8c3c65" (UID: "35fc6428-e432-43a3-9ba3-09e64c8c3c65"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 10 15:54:05 crc kubenswrapper[4669]: I1210 15:54:05.953320 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35fc6428-e432-43a3-9ba3-09e64c8c3c65-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 10 15:54:06 crc kubenswrapper[4669]: I1210 15:54:06.412983 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-xtmp8" event={"ID":"35fc6428-e432-43a3-9ba3-09e64c8c3c65","Type":"ContainerDied","Data":"44a311a72324fe1cb67d9fc8e69cddf5f07546382de26f065308c7eab8819997"}
Dec 10 15:54:06 crc kubenswrapper[4669]: I1210 15:54:06.413028 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-xtmp8"
Dec 10 15:54:06 crc kubenswrapper[4669]: I1210 15:54:06.413033 4669 scope.go:117] "RemoveContainer" containerID="d0666cf720f057418a521dee51d0427df03f5938b4ee4e7fa33bf0f8555fb69b"
Dec 10 15:54:06 crc kubenswrapper[4669]: I1210 15:54:06.432556 4669 scope.go:117] "RemoveContainer" containerID="110d01addc3a853b584f6557a186ddb2bd47464f301393b3a685056dbe0fdbbb"
Dec 10 15:54:06 crc kubenswrapper[4669]: I1210 15:54:06.452885 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-xtmp8"]
Dec 10 15:54:06 crc kubenswrapper[4669]: I1210 15:54:06.467779 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-xtmp8"]
Dec 10 15:54:06 crc kubenswrapper[4669]: I1210 15:54:06.477848 4669 scope.go:117] "RemoveContainer" containerID="cf553c53cb60b2de2244efdbb2c0a288f1018f5525242b2d509e07ef1a1fa099"
Dec 10 15:54:08 crc kubenswrapper[4669]: I1210 15:54:08.410474 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" path="/var/lib/kubelet/pods/35fc6428-e432-43a3-9ba3-09e64c8c3c65/volumes"
Dec 10 15:55:28 crc kubenswrapper[4669]: I1210 15:55:28.745452 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 15:55:28 crc kubenswrapper[4669]: I1210 15:55:28.746104 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 15:55:58 crc kubenswrapper[4669]: I1210 15:55:58.745110 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 10 15:55:58 crc kubenswrapper[4669]: I1210 15:55:58.745857 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.044191 4669 kubelet.go:2421]
"SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v6lnf"] Dec 10 15:56:27 crc kubenswrapper[4669]: E1210 15:56:27.045077 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerName="registry-server" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.045093 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerName="registry-server" Dec 10 15:56:27 crc kubenswrapper[4669]: E1210 15:56:27.045120 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerName="extract-content" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.045128 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerName="extract-content" Dec 10 15:56:27 crc kubenswrapper[4669]: E1210 15:56:27.045151 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerName="extract-utilities" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.045158 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerName="extract-utilities" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.045371 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="35fc6428-e432-43a3-9ba3-09e64c8c3c65" containerName="registry-server" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.046944 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.055970 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v6lnf"] Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.185513 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-catalog-content\") pod \"certified-operators-v6lnf\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.185619 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-utilities\") pod \"certified-operators-v6lnf\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.185672 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tlz6v\" (UniqueName: \"kubernetes.io/projected/d9e97e07-62a4-49f5-baf8-b38316727076-kube-api-access-tlz6v\") pod \"certified-operators-v6lnf\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.287519 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-catalog-content\") pod \"certified-operators-v6lnf\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 
15:56:27.287821 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-utilities\") pod \"certified-operators-v6lnf\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.287954 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tlz6v\" (UniqueName: \"kubernetes.io/projected/d9e97e07-62a4-49f5-baf8-b38316727076-kube-api-access-tlz6v\") pod \"certified-operators-v6lnf\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.288102 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-catalog-content\") pod \"certified-operators-v6lnf\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.288459 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-utilities\") pod \"certified-operators-v6lnf\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.311985 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tlz6v\" (UniqueName: \"kubernetes.io/projected/d9e97e07-62a4-49f5-baf8-b38316727076-kube-api-access-tlz6v\") pod \"certified-operators-v6lnf\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.370923 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:27 crc kubenswrapper[4669]: I1210 15:56:27.902000 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v6lnf"] Dec 10 15:56:28 crc kubenswrapper[4669]: I1210 15:56:28.745435 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:56:28 crc kubenswrapper[4669]: I1210 15:56:28.745759 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:56:28 crc kubenswrapper[4669]: I1210 15:56:28.745834 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:56:28 crc kubenswrapper[4669]: I1210 15:56:28.746793 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a5a1989b7116ed9e8655569d3bb8c3b49ea98622b68772ed51b4984f5fad6f0e"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:56:28 crc kubenswrapper[4669]: I1210 15:56:28.746873 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://a5a1989b7116ed9e8655569d3bb8c3b49ea98622b68772ed51b4984f5fad6f0e" gracePeriod=600 Dec 10 15:56:28 crc kubenswrapper[4669]: I1210 15:56:28.747707 4669 generic.go:334] "Generic (PLEG): container finished" podID="d9e97e07-62a4-49f5-baf8-b38316727076" containerID="cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73" exitCode=0 Dec 10 15:56:28 crc kubenswrapper[4669]: I1210 15:56:28.747763 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6lnf" event={"ID":"d9e97e07-62a4-49f5-baf8-b38316727076","Type":"ContainerDied","Data":"cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73"} Dec 10 15:56:28 crc kubenswrapper[4669]: I1210 15:56:28.747795 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6lnf" event={"ID":"d9e97e07-62a4-49f5-baf8-b38316727076","Type":"ContainerStarted","Data":"af47e63f412597c2ceeb1fcaf7f68cd92bb4bf88b00d97b36da8d736c7a5d3ce"} Dec 10 15:56:28 crc kubenswrapper[4669]: I1210 15:56:28.754121 4669 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 15:56:29 crc kubenswrapper[4669]: I1210 15:56:29.770233 4669 generic.go:334] "Generic (PLEG): container finished" podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="a5a1989b7116ed9e8655569d3bb8c3b49ea98622b68772ed51b4984f5fad6f0e" exitCode=0 Dec 10 15:56:29 crc kubenswrapper[4669]: I1210 15:56:29.770359 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" 
event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"a5a1989b7116ed9e8655569d3bb8c3b49ea98622b68772ed51b4984f5fad6f0e"} Dec 10 15:56:29 crc kubenswrapper[4669]: I1210 15:56:29.770873 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7"} Dec 10 15:56:29 crc kubenswrapper[4669]: I1210 15:56:29.770903 4669 scope.go:117] "RemoveContainer" containerID="c870713812ae7f1a29c89240a10fd9ae8961e2d64f0121c43367e7ad7e0d2591" Dec 10 15:56:30 crc kubenswrapper[4669]: I1210 15:56:30.782962 4669 generic.go:334] "Generic (PLEG): container finished" podID="d9e97e07-62a4-49f5-baf8-b38316727076" containerID="4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c" exitCode=0 Dec 10 15:56:30 crc kubenswrapper[4669]: I1210 15:56:30.783200 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6lnf" event={"ID":"d9e97e07-62a4-49f5-baf8-b38316727076","Type":"ContainerDied","Data":"4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c"} Dec 10 15:56:31 crc kubenswrapper[4669]: I1210 15:56:31.838555 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6lnf" event={"ID":"d9e97e07-62a4-49f5-baf8-b38316727076","Type":"ContainerStarted","Data":"53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223"} Dec 10 15:56:31 crc kubenswrapper[4669]: I1210 15:56:31.860185 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v6lnf" podStartSLOduration=2.332088968 podStartE2EDuration="4.860165883s" podCreationTimestamp="2025-12-10 15:56:27 +0000 UTC" firstStartedPulling="2025-12-10 15:56:28.75377984 +0000 UTC m=+2162.670726467" lastFinishedPulling="2025-12-10 15:56:31.281856755 +0000 UTC m=+2165.198803382" observedRunningTime="2025-12-10 15:56:31.858013471 +0000 UTC m=+2165.774960108" watchObservedRunningTime="2025-12-10 15:56:31.860165883 +0000 UTC m=+2165.777112520" Dec 10 15:56:37 crc kubenswrapper[4669]: I1210 15:56:37.372116 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:37 crc kubenswrapper[4669]: I1210 15:56:37.372621 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:37 crc kubenswrapper[4669]: I1210 15:56:37.433716 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:37 crc kubenswrapper[4669]: I1210 15:56:37.962924 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:38 crc kubenswrapper[4669]: I1210 15:56:38.028011 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v6lnf"] Dec 10 15:56:39 crc kubenswrapper[4669]: I1210 15:56:39.923175 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-v6lnf" podUID="d9e97e07-62a4-49f5-baf8-b38316727076" containerName="registry-server" containerID="cri-o://53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223" gracePeriod=2 Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 
15:56:40.369589 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.396702 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-utilities\") pod \"d9e97e07-62a4-49f5-baf8-b38316727076\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.397122 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-catalog-content\") pod \"d9e97e07-62a4-49f5-baf8-b38316727076\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.397292 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tlz6v\" (UniqueName: \"kubernetes.io/projected/d9e97e07-62a4-49f5-baf8-b38316727076-kube-api-access-tlz6v\") pod \"d9e97e07-62a4-49f5-baf8-b38316727076\" (UID: \"d9e97e07-62a4-49f5-baf8-b38316727076\") " Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.398152 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-utilities" (OuterVolumeSpecName: "utilities") pod "d9e97e07-62a4-49f5-baf8-b38316727076" (UID: "d9e97e07-62a4-49f5-baf8-b38316727076"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.402985 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9e97e07-62a4-49f5-baf8-b38316727076-kube-api-access-tlz6v" (OuterVolumeSpecName: "kube-api-access-tlz6v") pod "d9e97e07-62a4-49f5-baf8-b38316727076" (UID: "d9e97e07-62a4-49f5-baf8-b38316727076"). InnerVolumeSpecName "kube-api-access-tlz6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.499545 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tlz6v\" (UniqueName: \"kubernetes.io/projected/d9e97e07-62a4-49f5-baf8-b38316727076-kube-api-access-tlz6v\") on node \"crc\" DevicePath \"\"" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.499578 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.709550 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9e97e07-62a4-49f5-baf8-b38316727076" (UID: "d9e97e07-62a4-49f5-baf8-b38316727076"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.807796 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9e97e07-62a4-49f5-baf8-b38316727076-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.932766 4669 generic.go:334] "Generic (PLEG): container finished" podID="d9e97e07-62a4-49f5-baf8-b38316727076" containerID="53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223" exitCode=0 Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.932818 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6lnf" event={"ID":"d9e97e07-62a4-49f5-baf8-b38316727076","Type":"ContainerDied","Data":"53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223"} Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.932880 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v6lnf" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.932901 4669 scope.go:117] "RemoveContainer" containerID="53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.932886 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v6lnf" event={"ID":"d9e97e07-62a4-49f5-baf8-b38316727076","Type":"ContainerDied","Data":"af47e63f412597c2ceeb1fcaf7f68cd92bb4bf88b00d97b36da8d736c7a5d3ce"} Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.963394 4669 scope.go:117] "RemoveContainer" containerID="4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c" Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.972181 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v6lnf"] Dec 10 15:56:40 crc kubenswrapper[4669]: I1210 15:56:40.981138 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-v6lnf"] Dec 10 15:56:41 crc kubenswrapper[4669]: I1210 15:56:41.001146 4669 scope.go:117] "RemoveContainer" containerID="cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73" Dec 10 15:56:41 crc kubenswrapper[4669]: I1210 15:56:41.029872 4669 scope.go:117] "RemoveContainer" containerID="53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223" Dec 10 15:56:41 crc kubenswrapper[4669]: E1210 15:56:41.030370 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223\": container with ID starting with 53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223 not found: ID does not exist" containerID="53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223" Dec 10 15:56:41 crc kubenswrapper[4669]: I1210 15:56:41.030409 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223"} err="failed to get container status \"53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223\": rpc error: code = NotFound desc = could not find container \"53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223\": container with ID starting with 53dfbfe4c420399e8ac0ec6249c6674e485f6fae523352e0c54e0387c974e223 not found: ID does not exist" Dec 10 
15:56:41 crc kubenswrapper[4669]: I1210 15:56:41.030431 4669 scope.go:117] "RemoveContainer" containerID="4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c" Dec 10 15:56:41 crc kubenswrapper[4669]: E1210 15:56:41.030708 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c\": container with ID starting with 4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c not found: ID does not exist" containerID="4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c" Dec 10 15:56:41 crc kubenswrapper[4669]: I1210 15:56:41.030755 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c"} err="failed to get container status \"4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c\": rpc error: code = NotFound desc = could not find container \"4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c\": container with ID starting with 4eef730b902eb245fc64eac8cc0fc50e52e822d84d7b547ea9d93f83b3db251c not found: ID does not exist" Dec 10 15:56:41 crc kubenswrapper[4669]: I1210 15:56:41.030799 4669 scope.go:117] "RemoveContainer" containerID="cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73" Dec 10 15:56:41 crc kubenswrapper[4669]: E1210 15:56:41.031103 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73\": container with ID starting with cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73 not found: ID does not exist" containerID="cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73" Dec 10 15:56:41 crc kubenswrapper[4669]: I1210 15:56:41.031128 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73"} err="failed to get container status \"cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73\": rpc error: code = NotFound desc = could not find container \"cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73\": container with ID starting with cad6a9561e17057c1728bc097d6bdf06c23b17e4f80eae30e22bea3738bfba73 not found: ID does not exist" Dec 10 15:56:42 crc kubenswrapper[4669]: I1210 15:56:42.410846 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9e97e07-62a4-49f5-baf8-b38316727076" path="/var/lib/kubelet/pods/d9e97e07-62a4-49f5-baf8-b38316727076/volumes" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.569243 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dkxdz/must-gather-bclxt"] Dec 10 15:56:58 crc kubenswrapper[4669]: E1210 15:56:58.570040 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e97e07-62a4-49f5-baf8-b38316727076" containerName="extract-utilities" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.570060 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e97e07-62a4-49f5-baf8-b38316727076" containerName="extract-utilities" Dec 10 15:56:58 crc kubenswrapper[4669]: E1210 15:56:58.570081 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e97e07-62a4-49f5-baf8-b38316727076" containerName="registry-server" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.570088 
4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e97e07-62a4-49f5-baf8-b38316727076" containerName="registry-server" Dec 10 15:56:58 crc kubenswrapper[4669]: E1210 15:56:58.570103 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9e97e07-62a4-49f5-baf8-b38316727076" containerName="extract-content" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.570109 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9e97e07-62a4-49f5-baf8-b38316727076" containerName="extract-content" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.570290 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9e97e07-62a4-49f5-baf8-b38316727076" containerName="registry-server" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.571203 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.581770 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-dkxdz"/"openshift-service-ca.crt" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.581863 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-dkxdz"/"kube-root-ca.crt" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.613296 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-dkxdz/must-gather-bclxt"] Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.676597 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgnvw\" (UniqueName: \"kubernetes.io/projected/7adf36d6-68da-47a1-98e4-203642fff3df-kube-api-access-rgnvw\") pod \"must-gather-bclxt\" (UID: \"7adf36d6-68da-47a1-98e4-203642fff3df\") " pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.676810 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7adf36d6-68da-47a1-98e4-203642fff3df-must-gather-output\") pod \"must-gather-bclxt\" (UID: \"7adf36d6-68da-47a1-98e4-203642fff3df\") " pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.778237 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7adf36d6-68da-47a1-98e4-203642fff3df-must-gather-output\") pod \"must-gather-bclxt\" (UID: \"7adf36d6-68da-47a1-98e4-203642fff3df\") " pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.778410 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgnvw\" (UniqueName: \"kubernetes.io/projected/7adf36d6-68da-47a1-98e4-203642fff3df-kube-api-access-rgnvw\") pod \"must-gather-bclxt\" (UID: \"7adf36d6-68da-47a1-98e4-203642fff3df\") " pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.778689 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7adf36d6-68da-47a1-98e4-203642fff3df-must-gather-output\") pod \"must-gather-bclxt\" (UID: \"7adf36d6-68da-47a1-98e4-203642fff3df\") " pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.800245 4669 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgnvw\" (UniqueName: \"kubernetes.io/projected/7adf36d6-68da-47a1-98e4-203642fff3df-kube-api-access-rgnvw\") pod \"must-gather-bclxt\" (UID: \"7adf36d6-68da-47a1-98e4-203642fff3df\") " pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 15:56:58 crc kubenswrapper[4669]: I1210 15:56:58.893688 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 15:56:59 crc kubenswrapper[4669]: I1210 15:56:59.392563 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-dkxdz/must-gather-bclxt"] Dec 10 15:57:00 crc kubenswrapper[4669]: I1210 15:57:00.123243 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dkxdz/must-gather-bclxt" event={"ID":"7adf36d6-68da-47a1-98e4-203642fff3df","Type":"ContainerStarted","Data":"1a5a68658dc58f4374ab32fb97522e3c7750d9304d5a1df3faf122b12316bb92"} Dec 10 15:57:08 crc kubenswrapper[4669]: I1210 15:57:08.199192 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dkxdz/must-gather-bclxt" event={"ID":"7adf36d6-68da-47a1-98e4-203642fff3df","Type":"ContainerStarted","Data":"ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a"} Dec 10 15:57:09 crc kubenswrapper[4669]: I1210 15:57:09.210423 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dkxdz/must-gather-bclxt" event={"ID":"7adf36d6-68da-47a1-98e4-203642fff3df","Type":"ContainerStarted","Data":"3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73"} Dec 10 15:57:09 crc kubenswrapper[4669]: I1210 15:57:09.234568 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-dkxdz/must-gather-bclxt" podStartSLOduration=2.837848019 podStartE2EDuration="11.234543769s" podCreationTimestamp="2025-12-10 15:56:58 +0000 UTC" firstStartedPulling="2025-12-10 15:56:59.393405542 +0000 UTC m=+2193.310352169" lastFinishedPulling="2025-12-10 15:57:07.790101282 +0000 UTC m=+2201.707047919" observedRunningTime="2025-12-10 15:57:09.230052449 +0000 UTC m=+2203.146999076" watchObservedRunningTime="2025-12-10 15:57:09.234543769 +0000 UTC m=+2203.151490406" Dec 10 15:57:11 crc kubenswrapper[4669]: I1210 15:57:11.789173 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dkxdz/crc-debug-kqlnz"] Dec 10 15:57:11 crc kubenswrapper[4669]: I1210 15:57:11.793323 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" Dec 10 15:57:11 crc kubenswrapper[4669]: I1210 15:57:11.797602 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-dkxdz"/"default-dockercfg-rdv4q" Dec 10 15:57:11 crc kubenswrapper[4669]: I1210 15:57:11.965208 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct85b\" (UniqueName: \"kubernetes.io/projected/0a852cea-b3a2-42f9-9526-80a98e43ce39-kube-api-access-ct85b\") pod \"crc-debug-kqlnz\" (UID: \"0a852cea-b3a2-42f9-9526-80a98e43ce39\") " pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" Dec 10 15:57:11 crc kubenswrapper[4669]: I1210 15:57:11.965311 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a852cea-b3a2-42f9-9526-80a98e43ce39-host\") pod \"crc-debug-kqlnz\" (UID: \"0a852cea-b3a2-42f9-9526-80a98e43ce39\") " pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" Dec 10 15:57:12 crc kubenswrapper[4669]: I1210 15:57:12.067019 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct85b\" (UniqueName: \"kubernetes.io/projected/0a852cea-b3a2-42f9-9526-80a98e43ce39-kube-api-access-ct85b\") pod \"crc-debug-kqlnz\" (UID: \"0a852cea-b3a2-42f9-9526-80a98e43ce39\") " pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" Dec 10 15:57:12 crc kubenswrapper[4669]: I1210 15:57:12.067135 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a852cea-b3a2-42f9-9526-80a98e43ce39-host\") pod \"crc-debug-kqlnz\" (UID: \"0a852cea-b3a2-42f9-9526-80a98e43ce39\") " pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" Dec 10 15:57:12 crc kubenswrapper[4669]: I1210 15:57:12.067267 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a852cea-b3a2-42f9-9526-80a98e43ce39-host\") pod \"crc-debug-kqlnz\" (UID: \"0a852cea-b3a2-42f9-9526-80a98e43ce39\") " pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" Dec 10 15:57:12 crc kubenswrapper[4669]: I1210 15:57:12.103876 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct85b\" (UniqueName: \"kubernetes.io/projected/0a852cea-b3a2-42f9-9526-80a98e43ce39-kube-api-access-ct85b\") pod \"crc-debug-kqlnz\" (UID: \"0a852cea-b3a2-42f9-9526-80a98e43ce39\") " pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" Dec 10 15:57:12 crc kubenswrapper[4669]: I1210 15:57:12.143373 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" Dec 10 15:57:12 crc kubenswrapper[4669]: W1210 15:57:12.176713 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a852cea_b3a2_42f9_9526_80a98e43ce39.slice/crio-0a772d728ecbd47fdab07a5241f159341be624975b62fb727e8ca07b24320d04 WatchSource:0}: Error finding container 0a772d728ecbd47fdab07a5241f159341be624975b62fb727e8ca07b24320d04: Status 404 returned error can't find the container with id 0a772d728ecbd47fdab07a5241f159341be624975b62fb727e8ca07b24320d04 Dec 10 15:57:12 crc kubenswrapper[4669]: I1210 15:57:12.281382 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" event={"ID":"0a852cea-b3a2-42f9-9526-80a98e43ce39","Type":"ContainerStarted","Data":"0a772d728ecbd47fdab07a5241f159341be624975b62fb727e8ca07b24320d04"} Dec 10 15:57:22 crc kubenswrapper[4669]: I1210 15:57:22.848490 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-czsmp"] Dec 10 15:57:22 crc kubenswrapper[4669]: I1210 15:57:22.851003 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:22 crc kubenswrapper[4669]: I1210 15:57:22.881882 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-czsmp"] Dec 10 15:57:22 crc kubenswrapper[4669]: I1210 15:57:22.900764 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-utilities\") pod \"redhat-marketplace-czsmp\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:22 crc kubenswrapper[4669]: I1210 15:57:22.900901 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-catalog-content\") pod \"redhat-marketplace-czsmp\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:22 crc kubenswrapper[4669]: I1210 15:57:22.901013 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsdn8\" (UniqueName: \"kubernetes.io/projected/008809e9-7d90-42f8-8854-d686d433f7de-kube-api-access-xsdn8\") pod \"redhat-marketplace-czsmp\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.002629 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-utilities\") pod \"redhat-marketplace-czsmp\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.002732 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-catalog-content\") pod \"redhat-marketplace-czsmp\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.002804 
4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsdn8\" (UniqueName: \"kubernetes.io/projected/008809e9-7d90-42f8-8854-d686d433f7de-kube-api-access-xsdn8\") pod \"redhat-marketplace-czsmp\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.003108 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-utilities\") pod \"redhat-marketplace-czsmp\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.003365 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-catalog-content\") pod \"redhat-marketplace-czsmp\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.021527 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsdn8\" (UniqueName: \"kubernetes.io/projected/008809e9-7d90-42f8-8854-d686d433f7de-kube-api-access-xsdn8\") pod \"redhat-marketplace-czsmp\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.082408 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bpz4d"] Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.086378 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.105071 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bpz4d"] Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.175231 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.217133 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-catalog-content\") pod \"community-operators-bpz4d\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.217254 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-utilities\") pod \"community-operators-bpz4d\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.217279 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zsbvg\" (UniqueName: \"kubernetes.io/projected/f1d7c084-21af-4843-b1e1-7d7765ca2794-kube-api-access-zsbvg\") pod \"community-operators-bpz4d\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.318957 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-catalog-content\") pod \"community-operators-bpz4d\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.319061 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-utilities\") pod \"community-operators-bpz4d\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.319083 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zsbvg\" (UniqueName: \"kubernetes.io/projected/f1d7c084-21af-4843-b1e1-7d7765ca2794-kube-api-access-zsbvg\") pod \"community-operators-bpz4d\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.319765 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-catalog-content\") pod \"community-operators-bpz4d\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.319863 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-utilities\") pod \"community-operators-bpz4d\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.341337 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zsbvg\" (UniqueName: \"kubernetes.io/projected/f1d7c084-21af-4843-b1e1-7d7765ca2794-kube-api-access-zsbvg\") pod 
\"community-operators-bpz4d\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.394926 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" event={"ID":"0a852cea-b3a2-42f9-9526-80a98e43ce39","Type":"ContainerStarted","Data":"b4586fc6664c218acae71e60c55b2f7e4bd104b3279f17f6c9989871c9a8be5b"} Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.421109 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.435329 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" podStartSLOduration=2.334188083 podStartE2EDuration="12.435311753s" podCreationTimestamp="2025-12-10 15:57:11 +0000 UTC" firstStartedPulling="2025-12-10 15:57:12.179412433 +0000 UTC m=+2206.096359080" lastFinishedPulling="2025-12-10 15:57:22.280536123 +0000 UTC m=+2216.197482750" observedRunningTime="2025-12-10 15:57:23.414985788 +0000 UTC m=+2217.331932435" watchObservedRunningTime="2025-12-10 15:57:23.435311753 +0000 UTC m=+2217.352258380" Dec 10 15:57:23 crc kubenswrapper[4669]: I1210 15:57:23.951887 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-czsmp"] Dec 10 15:57:24 crc kubenswrapper[4669]: I1210 15:57:24.183902 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bpz4d"] Dec 10 15:57:24 crc kubenswrapper[4669]: W1210 15:57:24.185235 4669 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf1d7c084_21af_4843_b1e1_7d7765ca2794.slice/crio-5dc5dc5cbaf25afc278b484acdadacaaeac762c90775bf3e890ba8a67d74a20d WatchSource:0}: Error finding container 5dc5dc5cbaf25afc278b484acdadacaaeac762c90775bf3e890ba8a67d74a20d: Status 404 returned error can't find the container with id 5dc5dc5cbaf25afc278b484acdadacaaeac762c90775bf3e890ba8a67d74a20d Dec 10 15:57:24 crc kubenswrapper[4669]: I1210 15:57:24.408252 4669 generic.go:334] "Generic (PLEG): container finished" podID="008809e9-7d90-42f8-8854-d686d433f7de" containerID="7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361" exitCode=0 Dec 10 15:57:24 crc kubenswrapper[4669]: I1210 15:57:24.408333 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-czsmp" event={"ID":"008809e9-7d90-42f8-8854-d686d433f7de","Type":"ContainerDied","Data":"7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361"} Dec 10 15:57:24 crc kubenswrapper[4669]: I1210 15:57:24.408604 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-czsmp" event={"ID":"008809e9-7d90-42f8-8854-d686d433f7de","Type":"ContainerStarted","Data":"7a4165fb70db3897705a8e3380b3a1ebeae3c4ffa42dc0c8d09f5b047ac14666"} Dec 10 15:57:24 crc kubenswrapper[4669]: I1210 15:57:24.411096 4669 generic.go:334] "Generic (PLEG): container finished" podID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerID="b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a" exitCode=0 Dec 10 15:57:24 crc kubenswrapper[4669]: I1210 15:57:24.411173 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpz4d" 
event={"ID":"f1d7c084-21af-4843-b1e1-7d7765ca2794","Type":"ContainerDied","Data":"b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a"} Dec 10 15:57:24 crc kubenswrapper[4669]: I1210 15:57:24.411209 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpz4d" event={"ID":"f1d7c084-21af-4843-b1e1-7d7765ca2794","Type":"ContainerStarted","Data":"5dc5dc5cbaf25afc278b484acdadacaaeac762c90775bf3e890ba8a67d74a20d"} Dec 10 15:57:25 crc kubenswrapper[4669]: I1210 15:57:25.424493 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpz4d" event={"ID":"f1d7c084-21af-4843-b1e1-7d7765ca2794","Type":"ContainerStarted","Data":"bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f"} Dec 10 15:57:27 crc kubenswrapper[4669]: I1210 15:57:27.447745 4669 generic.go:334] "Generic (PLEG): container finished" podID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerID="bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f" exitCode=0 Dec 10 15:57:27 crc kubenswrapper[4669]: I1210 15:57:27.447799 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpz4d" event={"ID":"f1d7c084-21af-4843-b1e1-7d7765ca2794","Type":"ContainerDied","Data":"bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f"} Dec 10 15:57:28 crc kubenswrapper[4669]: I1210 15:57:28.468325 4669 generic.go:334] "Generic (PLEG): container finished" podID="008809e9-7d90-42f8-8854-d686d433f7de" containerID="6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607" exitCode=0 Dec 10 15:57:28 crc kubenswrapper[4669]: I1210 15:57:28.468487 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-czsmp" event={"ID":"008809e9-7d90-42f8-8854-d686d433f7de","Type":"ContainerDied","Data":"6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607"} Dec 10 15:57:34 crc kubenswrapper[4669]: I1210 15:57:34.521495 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpz4d" event={"ID":"f1d7c084-21af-4843-b1e1-7d7765ca2794","Type":"ContainerStarted","Data":"8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd"} Dec 10 15:57:34 crc kubenswrapper[4669]: I1210 15:57:34.548418 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bpz4d" podStartSLOduration=1.868485995 podStartE2EDuration="11.54840036s" podCreationTimestamp="2025-12-10 15:57:23 +0000 UTC" firstStartedPulling="2025-12-10 15:57:24.413423286 +0000 UTC m=+2218.330369913" lastFinishedPulling="2025-12-10 15:57:34.093337651 +0000 UTC m=+2228.010284278" observedRunningTime="2025-12-10 15:57:34.541927922 +0000 UTC m=+2228.458874579" watchObservedRunningTime="2025-12-10 15:57:34.54840036 +0000 UTC m=+2228.465346977" Dec 10 15:57:35 crc kubenswrapper[4669]: I1210 15:57:35.535555 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-czsmp" event={"ID":"008809e9-7d90-42f8-8854-d686d433f7de","Type":"ContainerStarted","Data":"9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787"} Dec 10 15:57:43 crc kubenswrapper[4669]: I1210 15:57:43.175948 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:43 crc kubenswrapper[4669]: I1210 15:57:43.176370 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:43 crc kubenswrapper[4669]: I1210 15:57:43.223597 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:43 crc kubenswrapper[4669]: I1210 15:57:43.250132 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-czsmp" podStartSLOduration=11.052629206 podStartE2EDuration="21.2501068s" podCreationTimestamp="2025-12-10 15:57:22 +0000 UTC" firstStartedPulling="2025-12-10 15:57:24.411242393 +0000 UTC m=+2218.328189020" lastFinishedPulling="2025-12-10 15:57:34.608719987 +0000 UTC m=+2228.525666614" observedRunningTime="2025-12-10 15:57:35.604300985 +0000 UTC m=+2229.521247612" watchObservedRunningTime="2025-12-10 15:57:43.2501068 +0000 UTC m=+2237.167053457" Dec 10 15:57:43 crc kubenswrapper[4669]: I1210 15:57:43.422259 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:43 crc kubenswrapper[4669]: I1210 15:57:43.422305 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:43 crc kubenswrapper[4669]: I1210 15:57:43.481902 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:43 crc kubenswrapper[4669]: I1210 15:57:43.767869 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:43 crc kubenswrapper[4669]: I1210 15:57:43.807994 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:44 crc kubenswrapper[4669]: I1210 15:57:44.726560 4669 generic.go:334] "Generic (PLEG): container finished" podID="0a852cea-b3a2-42f9-9526-80a98e43ce39" containerID="b4586fc6664c218acae71e60c55b2f7e4bd104b3279f17f6c9989871c9a8be5b" exitCode=0 Dec 10 15:57:44 crc kubenswrapper[4669]: I1210 15:57:44.726643 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" event={"ID":"0a852cea-b3a2-42f9-9526-80a98e43ce39","Type":"ContainerDied","Data":"b4586fc6664c218acae71e60c55b2f7e4bd104b3279f17f6c9989871c9a8be5b"} Dec 10 15:57:45 crc kubenswrapper[4669]: I1210 15:57:45.072107 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-czsmp"] Dec 10 15:57:45 crc kubenswrapper[4669]: I1210 15:57:45.735786 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-czsmp" podUID="008809e9-7d90-42f8-8854-d686d433f7de" containerName="registry-server" containerID="cri-o://9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787" gracePeriod=2 Dec 10 15:57:45 crc kubenswrapper[4669]: I1210 15:57:45.949911 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dkxdz/crc-debug-kqlnz" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.000578 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dkxdz/crc-debug-kqlnz"] Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.014727 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dkxdz/crc-debug-kqlnz"] Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.029156 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a852cea-b3a2-42f9-9526-80a98e43ce39-host\") pod \"0a852cea-b3a2-42f9-9526-80a98e43ce39\" (UID: \"0a852cea-b3a2-42f9-9526-80a98e43ce39\") " Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.029332 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ct85b\" (UniqueName: \"kubernetes.io/projected/0a852cea-b3a2-42f9-9526-80a98e43ce39-kube-api-access-ct85b\") pod \"0a852cea-b3a2-42f9-9526-80a98e43ce39\" (UID: \"0a852cea-b3a2-42f9-9526-80a98e43ce39\") " Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.029332 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0a852cea-b3a2-42f9-9526-80a98e43ce39-host" (OuterVolumeSpecName: "host") pod "0a852cea-b3a2-42f9-9526-80a98e43ce39" (UID: "0a852cea-b3a2-42f9-9526-80a98e43ce39"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.029800 4669 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/0a852cea-b3a2-42f9-9526-80a98e43ce39-host\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.036988 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a852cea-b3a2-42f9-9526-80a98e43ce39-kube-api-access-ct85b" (OuterVolumeSpecName: "kube-api-access-ct85b") pod "0a852cea-b3a2-42f9-9526-80a98e43ce39" (UID: "0a852cea-b3a2-42f9-9526-80a98e43ce39"). InnerVolumeSpecName "kube-api-access-ct85b". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.069803 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bpz4d"] Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.070005 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bpz4d" podUID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerName="registry-server" containerID="cri-o://8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd" gracePeriod=2 Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.134065 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ct85b\" (UniqueName: \"kubernetes.io/projected/0a852cea-b3a2-42f9-9526-80a98e43ce39-kube-api-access-ct85b\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.254906 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.337511 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-catalog-content\") pod \"008809e9-7d90-42f8-8854-d686d433f7de\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.337587 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-utilities\") pod \"008809e9-7d90-42f8-8854-d686d433f7de\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.337630 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xsdn8\" (UniqueName: \"kubernetes.io/projected/008809e9-7d90-42f8-8854-d686d433f7de-kube-api-access-xsdn8\") pod \"008809e9-7d90-42f8-8854-d686d433f7de\" (UID: \"008809e9-7d90-42f8-8854-d686d433f7de\") " Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.340585 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-utilities" (OuterVolumeSpecName: "utilities") pod "008809e9-7d90-42f8-8854-d686d433f7de" (UID: "008809e9-7d90-42f8-8854-d686d433f7de"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.342924 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/008809e9-7d90-42f8-8854-d686d433f7de-kube-api-access-xsdn8" (OuterVolumeSpecName: "kube-api-access-xsdn8") pod "008809e9-7d90-42f8-8854-d686d433f7de" (UID: "008809e9-7d90-42f8-8854-d686d433f7de"). InnerVolumeSpecName "kube-api-access-xsdn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.361429 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "008809e9-7d90-42f8-8854-d686d433f7de" (UID: "008809e9-7d90-42f8-8854-d686d433f7de"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.410948 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a852cea-b3a2-42f9-9526-80a98e43ce39" path="/var/lib/kubelet/pods/0a852cea-b3a2-42f9-9526-80a98e43ce39/volumes" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.442484 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.442521 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/008809e9-7d90-42f8-8854-d686d433f7de-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.442532 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xsdn8\" (UniqueName: \"kubernetes.io/projected/008809e9-7d90-42f8-8854-d686d433f7de-kube-api-access-xsdn8\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.491069 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bpz4d" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.645578 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-utilities\") pod \"f1d7c084-21af-4843-b1e1-7d7765ca2794\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.645634 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-catalog-content\") pod \"f1d7c084-21af-4843-b1e1-7d7765ca2794\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.645720 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zsbvg\" (UniqueName: \"kubernetes.io/projected/f1d7c084-21af-4843-b1e1-7d7765ca2794-kube-api-access-zsbvg\") pod \"f1d7c084-21af-4843-b1e1-7d7765ca2794\" (UID: \"f1d7c084-21af-4843-b1e1-7d7765ca2794\") " Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.646596 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-utilities" (OuterVolumeSpecName: "utilities") pod "f1d7c084-21af-4843-b1e1-7d7765ca2794" (UID: "f1d7c084-21af-4843-b1e1-7d7765ca2794"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.650251 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1d7c084-21af-4843-b1e1-7d7765ca2794-kube-api-access-zsbvg" (OuterVolumeSpecName: "kube-api-access-zsbvg") pod "f1d7c084-21af-4843-b1e1-7d7765ca2794" (UID: "f1d7c084-21af-4843-b1e1-7d7765ca2794"). InnerVolumeSpecName "kube-api-access-zsbvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.701053 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f1d7c084-21af-4843-b1e1-7d7765ca2794" (UID: "f1d7c084-21af-4843-b1e1-7d7765ca2794"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.747318 4669 generic.go:334] "Generic (PLEG): container finished" podID="008809e9-7d90-42f8-8854-d686d433f7de" containerID="9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787" exitCode=0 Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.747431 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-czsmp" event={"ID":"008809e9-7d90-42f8-8854-d686d433f7de","Type":"ContainerDied","Data":"9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787"} Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.747499 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-czsmp" event={"ID":"008809e9-7d90-42f8-8854-d686d433f7de","Type":"ContainerDied","Data":"7a4165fb70db3897705a8e3380b3a1ebeae3c4ffa42dc0c8d09f5b047ac14666"} Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.747527 4669 scope.go:117] "RemoveContainer" containerID="9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.747801 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-czsmp" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.751974 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.752000 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f1d7c084-21af-4843-b1e1-7d7765ca2794-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.752023 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zsbvg\" (UniqueName: \"kubernetes.io/projected/f1d7c084-21af-4843-b1e1-7d7765ca2794-kube-api-access-zsbvg\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.753638 4669 generic.go:334] "Generic (PLEG): container finished" podID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerID="8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd" exitCode=0 Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.753709 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpz4d" event={"ID":"f1d7c084-21af-4843-b1e1-7d7765ca2794","Type":"ContainerDied","Data":"8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd"} Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.753742 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bpz4d" event={"ID":"f1d7c084-21af-4843-b1e1-7d7765ca2794","Type":"ContainerDied","Data":"5dc5dc5cbaf25afc278b484acdadacaaeac762c90775bf3e890ba8a67d74a20d"} Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.753858 4669 
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.771244 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dkxdz/crc-debug-kqlnz"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.826520 4669 scope.go:117] "RemoveContainer" containerID="6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.828858 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-czsmp"]
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.838578 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-czsmp"]
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.854272 4669 scope.go:117] "RemoveContainer" containerID="7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.854803 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bpz4d"]
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.871964 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bpz4d"]
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.891586 4669 scope.go:117] "RemoveContainer" containerID="9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787"
Dec 10 15:57:46 crc kubenswrapper[4669]: E1210 15:57:46.892304 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787\": container with ID starting with 9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787 not found: ID does not exist" containerID="9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.892334 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787"} err="failed to get container status \"9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787\": rpc error: code = NotFound desc = could not find container \"9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787\": container with ID starting with 9540720698ce959e05cb40bfa67b1bfdaf362cb319cf84284c17ba440f699787 not found: ID does not exist"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.892356 4669 scope.go:117] "RemoveContainer" containerID="6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607"
Dec 10 15:57:46 crc kubenswrapper[4669]: E1210 15:57:46.892541 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607\": container with ID starting with 6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607 not found: ID does not exist" containerID="6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.892559 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607"} err="failed to get container status \"6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607\": rpc error: code = NotFound desc = could not find container \"6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607\": container with ID starting with 6e9849a97f2ae071f9c45b5426c1bd4ba71d2fe7e120e64dba19298741ead607 not found: ID does not exist"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.892572 4669 scope.go:117] "RemoveContainer" containerID="7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361"
Dec 10 15:57:46 crc kubenswrapper[4669]: E1210 15:57:46.892781 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361\": container with ID starting with 7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361 not found: ID does not exist" containerID="7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.892800 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361"} err="failed to get container status \"7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361\": rpc error: code = NotFound desc = could not find container \"7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361\": container with ID starting with 7a87044366370f8e7ba17be19919fe9bf235dca2c0a095b9ed6b9171b7016361 not found: ID does not exist"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.892815 4669 scope.go:117] "RemoveContainer" containerID="8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.935437 4669 scope.go:117] "RemoveContainer" containerID="bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.956456 4669 scope.go:117] "RemoveContainer" containerID="b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.993909 4669 scope.go:117] "RemoveContainer" containerID="8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd"
Dec 10 15:57:46 crc kubenswrapper[4669]: E1210 15:57:46.994437 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd\": container with ID starting with 8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd not found: ID does not exist" containerID="8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.994522 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd"} err="failed to get container status \"8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd\": rpc error: code = NotFound desc = could not find container \"8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd\": container with ID starting with 8dc501e8fd859e998d4b07a03a36b60ccbc95ca91a03ea838ad01aafbc9133fd not found: ID does not exist"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.994557 4669 scope.go:117] "RemoveContainer" containerID="bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f"
Dec 10 15:57:46 crc kubenswrapper[4669]: E1210 15:57:46.994979 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f\": container with ID starting with bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f not found: ID does not exist" containerID="bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.995014 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f"} err="failed to get container status \"bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f\": rpc error: code = NotFound desc = could not find container \"bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f\": container with ID starting with bba6a777c8bb64d0153fdd06d92912ad0f0c4f9e017f3d8e41fae03688df676f not found: ID does not exist"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.995056 4669 scope.go:117] "RemoveContainer" containerID="b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a"
Dec 10 15:57:46 crc kubenswrapper[4669]: E1210 15:57:46.995395 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a\": container with ID starting with b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a not found: ID does not exist" containerID="b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.995432 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a"} err="failed to get container status \"b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a\": rpc error: code = NotFound desc = could not find container \"b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a\": container with ID starting with b74299b67f53b8d25e1e96e51d082e6f99402fab6183600bb8e71dd87dd9576a not found: ID does not exist"
Dec 10 15:57:46 crc kubenswrapper[4669]: I1210 15:57:46.995453 4669 scope.go:117] "RemoveContainer" containerID="b4586fc6664c218acae71e60c55b2f7e4bd104b3279f17f6c9989871c9a8be5b"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.396925 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-dkxdz/crc-debug-kk2dm"]
Dec 10 15:57:47 crc kubenswrapper[4669]: E1210 15:57:47.397650 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerName="registry-server"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.397677 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerName="registry-server"
Dec 10 15:57:47 crc kubenswrapper[4669]: E1210 15:57:47.397706 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="008809e9-7d90-42f8-8854-d686d433f7de" containerName="extract-utilities"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.397716 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="008809e9-7d90-42f8-8854-d686d433f7de" containerName="extract-utilities"
Dec 10 15:57:47 crc kubenswrapper[4669]: E1210 15:57:47.397732 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a852cea-b3a2-42f9-9526-80a98e43ce39" containerName="container-00"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.397740 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a852cea-b3a2-42f9-9526-80a98e43ce39" containerName="container-00"
Dec 10 15:57:47 crc kubenswrapper[4669]: E1210 15:57:47.397766 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerName="extract-utilities"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.397775 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerName="extract-utilities"
Dec 10 15:57:47 crc kubenswrapper[4669]: E1210 15:57:47.397787 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerName="extract-content"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.397794 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerName="extract-content"
Dec 10 15:57:47 crc kubenswrapper[4669]: E1210 15:57:47.397816 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="008809e9-7d90-42f8-8854-d686d433f7de" containerName="extract-content"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.397826 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="008809e9-7d90-42f8-8854-d686d433f7de" containerName="extract-content"
Dec 10 15:57:47 crc kubenswrapper[4669]: E1210 15:57:47.397834 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="008809e9-7d90-42f8-8854-d686d433f7de" containerName="registry-server"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.397840 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="008809e9-7d90-42f8-8854-d686d433f7de" containerName="registry-server"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.398010 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a852cea-b3a2-42f9-9526-80a98e43ce39" containerName="container-00"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.398024 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1d7c084-21af-4843-b1e1-7d7765ca2794" containerName="registry-server"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.398039 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="008809e9-7d90-42f8-8854-d686d433f7de" containerName="registry-server"
Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.399713 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dkxdz/crc-debug-kk2dm"
Need to start a new one" pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.402679 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-dkxdz"/"default-dockercfg-rdv4q" Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.567241 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7q6n\" (UniqueName: \"kubernetes.io/projected/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-kube-api-access-h7q6n\") pod \"crc-debug-kk2dm\" (UID: \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\") " pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.567295 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-host\") pod \"crc-debug-kk2dm\" (UID: \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\") " pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.668957 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7q6n\" (UniqueName: \"kubernetes.io/projected/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-kube-api-access-h7q6n\") pod \"crc-debug-kk2dm\" (UID: \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\") " pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.669073 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-host\") pod \"crc-debug-kk2dm\" (UID: \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\") " pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.669204 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-host\") pod \"crc-debug-kk2dm\" (UID: \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\") " pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.688464 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7q6n\" (UniqueName: \"kubernetes.io/projected/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-kube-api-access-h7q6n\") pod \"crc-debug-kk2dm\" (UID: \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\") " pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.716881 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:57:47 crc kubenswrapper[4669]: I1210 15:57:47.785575 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" event={"ID":"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066","Type":"ContainerStarted","Data":"fb568aa8a39d68b00427e0d9eef6c089798512ec8c0d424ef84d08a6cc528cf1"} Dec 10 15:57:48 crc kubenswrapper[4669]: I1210 15:57:48.412969 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="008809e9-7d90-42f8-8854-d686d433f7de" path="/var/lib/kubelet/pods/008809e9-7d90-42f8-8854-d686d433f7de/volumes" Dec 10 15:57:48 crc kubenswrapper[4669]: I1210 15:57:48.414505 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1d7c084-21af-4843-b1e1-7d7765ca2794" path="/var/lib/kubelet/pods/f1d7c084-21af-4843-b1e1-7d7765ca2794/volumes" Dec 10 15:57:48 crc kubenswrapper[4669]: I1210 15:57:48.795576 4669 generic.go:334] "Generic (PLEG): container finished" podID="a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066" containerID="02a440dda0d1c404dcb550afbc2cd5592fe040eea66a2f4fead47460073cfe9c" exitCode=1 Dec 10 15:57:48 crc kubenswrapper[4669]: I1210 15:57:48.795880 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" event={"ID":"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066","Type":"ContainerDied","Data":"02a440dda0d1c404dcb550afbc2cd5592fe040eea66a2f4fead47460073cfe9c"} Dec 10 15:57:48 crc kubenswrapper[4669]: I1210 15:57:48.829778 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dkxdz/crc-debug-kk2dm"] Dec 10 15:57:48 crc kubenswrapper[4669]: I1210 15:57:48.841282 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dkxdz/crc-debug-kk2dm"] Dec 10 15:57:49 crc kubenswrapper[4669]: I1210 15:57:49.914445 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:57:50 crc kubenswrapper[4669]: I1210 15:57:50.024136 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7q6n\" (UniqueName: \"kubernetes.io/projected/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-kube-api-access-h7q6n\") pod \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\" (UID: \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\") " Dec 10 15:57:50 crc kubenswrapper[4669]: I1210 15:57:50.024411 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-host\") pod \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\" (UID: \"a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066\") " Dec 10 15:57:50 crc kubenswrapper[4669]: I1210 15:57:50.024919 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-host" (OuterVolumeSpecName: "host") pod "a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066" (UID: "a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 10 15:57:50 crc kubenswrapper[4669]: I1210 15:57:50.043841 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-kube-api-access-h7q6n" (OuterVolumeSpecName: "kube-api-access-h7q6n") pod "a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066" (UID: "a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066"). InnerVolumeSpecName "kube-api-access-h7q6n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 15:57:50 crc kubenswrapper[4669]: I1210 15:57:50.126429 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7q6n\" (UniqueName: \"kubernetes.io/projected/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-kube-api-access-h7q6n\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:50 crc kubenswrapper[4669]: I1210 15:57:50.126473 4669 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066-host\") on node \"crc\" DevicePath \"\"" Dec 10 15:57:50 crc kubenswrapper[4669]: I1210 15:57:50.408460 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066" path="/var/lib/kubelet/pods/a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066/volumes" Dec 10 15:57:50 crc kubenswrapper[4669]: I1210 15:57:50.809923 4669 scope.go:117] "RemoveContainer" containerID="02a440dda0d1c404dcb550afbc2cd5592fe040eea66a2f4fead47460073cfe9c" Dec 10 15:57:50 crc kubenswrapper[4669]: I1210 15:57:50.810051 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-dkxdz/crc-debug-kk2dm" Dec 10 15:58:37 crc kubenswrapper[4669]: I1210 15:58:37.677858 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-547d56d5c6-827zl_e492df2a-6b67-4562-96cd-3c7495e7f9b3/barbican-api/0.log" Dec 10 15:58:37 crc kubenswrapper[4669]: I1210 15:58:37.856172 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-547d56d5c6-827zl_e492df2a-6b67-4562-96cd-3c7495e7f9b3/barbican-api-log/0.log" Dec 10 15:58:37 crc kubenswrapper[4669]: I1210 15:58:37.973616 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d6f8b949d-tbds5_8a62c0c4-e96b-486b-8660-5a797598341b/barbican-keystone-listener/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.023808 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d6f8b949d-tbds5_8a62c0c4-e96b-486b-8660-5a797598341b/barbican-keystone-listener-log/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.168198 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-dc69454c7-zsnjz_87675b91-d647-453f-bb63-2d10ddb27991/barbican-worker/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.229911 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-dc69454c7-zsnjz_87675b91-d647-453f-bb63-2d10ddb27991/barbican-worker-log/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.376100 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-2csns_2919610d-6d25-4181-a177-04920d27ee8d/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.496157 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_38eb24f6-e94c-4469-8284-4e3e79ca1712/ceilometer-central-agent/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.566881 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_38eb24f6-e94c-4469-8284-4e3e79ca1712/ceilometer-notification-agent/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.623143 4669 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_38eb24f6-e94c-4469-8284-4e3e79ca1712/proxy-httpd/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.738013 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceph-hci-pre-edpm-deployment-openstack-edpm-ipam-dw8dk_c65b4bdc-0d6f-4952-9945-6685f952fe6c/ceph-hci-pre-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.747873 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_38eb24f6-e94c-4469-8284-4e3e79ca1712/sg-core/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.940286 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_345ad3da-25c1-4df8-8787-41d753b480ce/cinder-api/0.log" Dec 10 15:58:38 crc kubenswrapper[4669]: I1210 15:58:38.993311 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_345ad3da-25c1-4df8-8787-41d753b480ce/cinder-api-log/0.log" Dec 10 15:58:39 crc kubenswrapper[4669]: I1210 15:58:39.136151 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_62ef9f0e-8922-4e9f-a3d5-8c713471cc3e/probe/0.log" Dec 10 15:58:39 crc kubenswrapper[4669]: I1210 15:58:39.146507 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_62ef9f0e-8922-4e9f-a3d5-8c713471cc3e/cinder-scheduler/0.log" Dec 10 15:58:39 crc kubenswrapper[4669]: I1210 15:58:39.323597 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-4kxg9_e1113280-5934-4d11-9449-b96a953ca8d4/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 15:58:39 crc kubenswrapper[4669]: I1210 15:58:39.384358 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-dtbw2_b543a9bb-281e-40da-bc8d-9a50df670090/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 15:58:39 crc kubenswrapper[4669]: I1210 15:58:39.520422 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667ff9c869-hmjvp_d4ae1a29-b756-4385-ac2e-834b97397c4a/init/0.log" Dec 10 15:58:39 crc kubenswrapper[4669]: I1210 15:58:39.773880 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-htq5r_f9c2de92-64ab-47c4-af56-0422c30259e2/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 15:58:39 crc kubenswrapper[4669]: I1210 15:58:39.777293 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667ff9c869-hmjvp_d4ae1a29-b756-4385-ac2e-834b97397c4a/init/0.log" Dec 10 15:58:39 crc kubenswrapper[4669]: I1210 15:58:39.849382 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-667ff9c869-hmjvp_d4ae1a29-b756-4385-ac2e-834b97397c4a/dnsmasq-dns/0.log" Dec 10 15:58:40 crc kubenswrapper[4669]: I1210 15:58:40.006892 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-659884ff9-rxsts_cb398931-6065-4b00-b312-b2fcfda385ab/keystone-api/0.log" Dec 10 15:58:40 crc kubenswrapper[4669]: I1210 15:58:40.068751 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_05a43950-03b0-4075-a8a5-d157dd6367db/kube-state-metrics/0.log" Dec 10 15:58:40 crc kubenswrapper[4669]: I1210 15:58:40.410774 4669 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_neutron-6d8556c6c7-9clqn_46bed994-2127-48c1-9776-b303e6ea6bc7/neutron-api/0.log" Dec 10 15:58:40 crc kubenswrapper[4669]: I1210 15:58:40.525757 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-6d8556c6c7-9clqn_46bed994-2127-48c1-9776-b303e6ea6bc7/neutron-httpd/0.log" Dec 10 15:58:40 crc kubenswrapper[4669]: I1210 15:58:40.885766 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5dfb396a-215c-4b54-91ec-eff70e79dd70/nova-api-log/0.log" Dec 10 15:58:40 crc kubenswrapper[4669]: I1210 15:58:40.891647 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_5dfb396a-215c-4b54-91ec-eff70e79dd70/nova-api-api/0.log" Dec 10 15:58:41 crc kubenswrapper[4669]: I1210 15:58:41.393192 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_dffe45fb-00df-4b42-8982-fc996a9707dc/nova-cell0-conductor-conductor/0.log" Dec 10 15:58:41 crc kubenswrapper[4669]: I1210 15:58:41.488835 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_c288c8b0-b24e-40e8-8b75-79887e1d9ed4/nova-cell1-conductor-conductor/0.log" Dec 10 15:58:41 crc kubenswrapper[4669]: I1210 15:58:41.704895 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_1ea5cb75-2c66-4b3b-b38b-392e22552d43/nova-cell1-novncproxy-novncproxy/0.log" Dec 10 15:58:41 crc kubenswrapper[4669]: I1210 15:58:41.900177 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_9d7aa3a5-9d54-482c-b662-c1b60dcb7b30/nova-metadata-log/0.log" Dec 10 15:58:42 crc kubenswrapper[4669]: I1210 15:58:42.262605 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_ecd974e7-befa-41b2-a5ad-dfbdd0e69ba2/nova-scheduler-scheduler/0.log" Dec 10 15:58:42 crc kubenswrapper[4669]: I1210 15:58:42.421622 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_9d7aa3a5-9d54-482c-b662-c1b60dcb7b30/nova-metadata-metadata/0.log" Dec 10 15:58:42 crc kubenswrapper[4669]: I1210 15:58:42.479348 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_970268b9-aefe-4481-a415-94b74a1de83c/mysql-bootstrap/0.log" Dec 10 15:58:42 crc kubenswrapper[4669]: I1210 15:58:42.739638 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_970268b9-aefe-4481-a415-94b74a1de83c/mysql-bootstrap/0.log" Dec 10 15:58:42 crc kubenswrapper[4669]: I1210 15:58:42.786674 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_970268b9-aefe-4481-a415-94b74a1de83c/galera/0.log" Dec 10 15:58:42 crc kubenswrapper[4669]: I1210 15:58:42.793986 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_7873e2f3-ba85-4e59-8866-dab32f5604c4/mysql-bootstrap/0.log" Dec 10 15:58:43 crc kubenswrapper[4669]: I1210 15:58:43.123730 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_7873e2f3-ba85-4e59-8866-dab32f5604c4/mysql-bootstrap/0.log" Dec 10 15:58:43 crc kubenswrapper[4669]: I1210 15:58:43.133177 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_7873e2f3-ba85-4e59-8866-dab32f5604c4/galera/0.log" Dec 10 15:58:43 crc kubenswrapper[4669]: I1210 15:58:43.153091 4669 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstackclient_6e062ab8-0c3e-4566-84d6-32dd4a604f41/openstackclient/0.log" Dec 10 15:58:43 crc kubenswrapper[4669]: I1210 15:58:43.346609 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-j9nmw_f79f439d-6ac0-4ebc-8ac8-1023ec207254/ovn-controller/0.log" Dec 10 15:58:43 crc kubenswrapper[4669]: I1210 15:58:43.402125 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-z2z75_e8e724f9-542f-4eae-96db-60eb825c5de0/openstack-network-exporter/0.log" Dec 10 15:58:43 crc kubenswrapper[4669]: I1210 15:58:43.687696 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bbqvq_c64b1bab-fa16-4b15-b9f7-7d821c4a8059/ovsdb-server-init/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.003363 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bbqvq_c64b1bab-fa16-4b15-b9f7-7d821c4a8059/ovsdb-server/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.038043 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bbqvq_c64b1bab-fa16-4b15-b9f7-7d821c4a8059/ovs-vswitchd/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.068693 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-bbqvq_c64b1bab-fa16-4b15-b9f7-7d821c4a8059/ovsdb-server-init/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.291316 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_af1a1d1a-d7b1-4841-ab3c-f643db33079b/openstack-network-exporter/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.299832 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_af1a1d1a-d7b1-4841-ab3c-f643db33079b/ovn-northd/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.439184 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_8d18be2e-49c2-413b-87d3-c76d505b482b/openstack-network-exporter/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.532559 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_8d18be2e-49c2-413b-87d3-c76d505b482b/ovsdbserver-nb/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.773319 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2/openstack-network-exporter/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.786894 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_6e5b5fd4-ca66-4ea0-8834-c6f3ffa0eda2/ovsdbserver-sb/0.log" Dec 10 15:58:44 crc kubenswrapper[4669]: I1210 15:58:44.901938 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6556f5d7cd-dgcb9_7fe33769-9a18-405c-a7a8-e1fbcb719fff/placement-api/0.log" Dec 10 15:58:45 crc kubenswrapper[4669]: I1210 15:58:45.054373 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-6556f5d7cd-dgcb9_7fe33769-9a18-405c-a7a8-e1fbcb719fff/placement-log/0.log" Dec 10 15:58:45 crc kubenswrapper[4669]: I1210 15:58:45.122896 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_fe16c440-f893-46cb-b038-536568c85b5b/setup-container/0.log" Dec 10 15:58:45 crc kubenswrapper[4669]: I1210 15:58:45.461600 4669 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_rabbitmq-server-0_61575229-df2e-466a-858a-02d9fa0c1e79/setup-container/0.log" Dec 10 15:58:45 crc kubenswrapper[4669]: I1210 15:58:45.486572 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_fe16c440-f893-46cb-b038-536568c85b5b/setup-container/0.log" Dec 10 15:58:45 crc kubenswrapper[4669]: I1210 15:58:45.547548 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_fe16c440-f893-46cb-b038-536568c85b5b/rabbitmq/0.log" Dec 10 15:58:45 crc kubenswrapper[4669]: I1210 15:58:45.745093 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_61575229-df2e-466a-858a-02d9fa0c1e79/setup-container/0.log" Dec 10 15:58:45 crc kubenswrapper[4669]: I1210 15:58:45.809619 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-9knwk_a7f38325-ba76-4bb5-85c2-b0fc44213a71/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 15:58:45 crc kubenswrapper[4669]: I1210 15:58:45.815639 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_61575229-df2e-466a-858a-02d9fa0c1e79/rabbitmq/0.log" Dec 10 15:58:46 crc kubenswrapper[4669]: I1210 15:58:46.066452 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-gk4j2_a9ab686a-606a-4af1-83e6-42db47c6c650/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 15:58:46 crc kubenswrapper[4669]: I1210 15:58:46.139360 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-ch2tx_ced1fc19-0076-4a98-82fa-a93c33c4a43a/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 15:58:46 crc kubenswrapper[4669]: I1210 15:58:46.379605 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-vmdrd_4617562a-4946-4591-b331-853992039296/ssh-known-hosts-edpm-deployment/0.log" Dec 10 15:58:46 crc kubenswrapper[4669]: I1210 15:58:46.461570 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-rvhvg_63939037-98a0-4152-95a4-5a64323a3ee3/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 10 15:58:46 crc kubenswrapper[4669]: I1210 15:58:46.517234 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_66f18088-3aa3-4dfc-b85b-9289f133a199/memcached/0.log" Dec 10 15:58:58 crc kubenswrapper[4669]: I1210 15:58:58.744525 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:58:58 crc kubenswrapper[4669]: I1210 15:58:58.745078 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:59:10 crc kubenswrapper[4669]: I1210 15:59:10.333078 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d_2358ffe2-b531-4017-b8af-fc3915d57ee2/util/0.log" 
Dec 10 15:59:10 crc kubenswrapper[4669]: I1210 15:59:10.566746 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d_2358ffe2-b531-4017-b8af-fc3915d57ee2/pull/0.log" Dec 10 15:59:10 crc kubenswrapper[4669]: I1210 15:59:10.595701 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d_2358ffe2-b531-4017-b8af-fc3915d57ee2/util/0.log" Dec 10 15:59:10 crc kubenswrapper[4669]: I1210 15:59:10.655634 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d_2358ffe2-b531-4017-b8af-fc3915d57ee2/pull/0.log" Dec 10 15:59:10 crc kubenswrapper[4669]: I1210 15:59:10.839112 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d_2358ffe2-b531-4017-b8af-fc3915d57ee2/util/0.log" Dec 10 15:59:10 crc kubenswrapper[4669]: I1210 15:59:10.886774 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d_2358ffe2-b531-4017-b8af-fc3915d57ee2/extract/0.log" Dec 10 15:59:10 crc kubenswrapper[4669]: I1210 15:59:10.887855 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_5bdffe0843ef097cc603bdce1de3a2e7a3bffc8c77495484eaa05cfe0btft7d_2358ffe2-b531-4017-b8af-fc3915d57ee2/pull/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.026202 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-lhblg_a260011c-3fcf-47cd-9472-20b180b4bd2f/kube-rbac-proxy/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.207098 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-lhblg_a260011c-3fcf-47cd-9472-20b180b4bd2f/manager/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.215295 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-d6c6m_4ecff5d1-1a76-4282-a11e-ee74b69e7450/kube-rbac-proxy/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.314962 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-6c677c69b-d6c6m_4ecff5d1-1a76-4282-a11e-ee74b69e7450/manager/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.451920 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-47j5t_f0da37f3-9f8c-4d66-ba13-6c1da41ceba2/kube-rbac-proxy/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.489893 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-697fb699cf-47j5t_f0da37f3-9f8c-4d66-ba13-6c1da41ceba2/manager/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.688746 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-vzhnx_9b8ca892-98a2-4e46-816f-548631ceaf50/kube-rbac-proxy/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.729612 4669 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_glance-operator-controller-manager-5697bb5779-vzhnx_9b8ca892-98a2-4e46-816f-548631ceaf50/manager/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.842077 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-gkcl6_ec748f3b-e193-43da-8d3b-c6d6169f58b5/kube-rbac-proxy/0.log" Dec 10 15:59:11 crc kubenswrapper[4669]: I1210 15:59:11.982767 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-fkxpl_edd21671-d820-4fb7-835e-97fd0ade3909/kube-rbac-proxy/0.log" Dec 10 15:59:12 crc kubenswrapper[4669]: I1210 15:59:12.047818 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-gkcl6_ec748f3b-e193-43da-8d3b-c6d6169f58b5/manager/0.log" Dec 10 15:59:12 crc kubenswrapper[4669]: I1210 15:59:12.142303 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-fkxpl_edd21671-d820-4fb7-835e-97fd0ade3909/manager/0.log" Dec 10 15:59:12 crc kubenswrapper[4669]: I1210 15:59:12.341061 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-qq45r_cfb93e7d-25ad-468f-8b68-9b6b57676a5a/kube-rbac-proxy/0.log" Dec 10 15:59:12 crc kubenswrapper[4669]: I1210 15:59:12.509199 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-78d48bff9d-qq45r_cfb93e7d-25ad-468f-8b68-9b6b57676a5a/manager/0.log" Dec 10 15:59:12 crc kubenswrapper[4669]: I1210 15:59:12.534176 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-sh22l_332b9f2c-9474-4368-9d76-3e98561c2279/kube-rbac-proxy/0.log" Dec 10 15:59:12 crc kubenswrapper[4669]: I1210 15:59:12.591905 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-967d97867-sh22l_332b9f2c-9474-4368-9d76-3e98561c2279/manager/0.log" Dec 10 15:59:12 crc kubenswrapper[4669]: I1210 15:59:12.716272 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-t7wdn_460630fb-9db1-487a-af29-d92b820e0a1b/kube-rbac-proxy/0.log" Dec 10 15:59:12 crc kubenswrapper[4669]: I1210 15:59:12.859395 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-t7wdn_460630fb-9db1-487a-af29-d92b820e0a1b/manager/0.log" Dec 10 15:59:12 crc kubenswrapper[4669]: I1210 15:59:12.928627 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-c9chn_c24a6a06-852c-476f-bb86-03c1e2430a48/kube-rbac-proxy/0.log" Dec 10 15:59:13 crc kubenswrapper[4669]: I1210 15:59:13.012541 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-5b5fd79c9c-c9chn_c24a6a06-852c-476f-bb86-03c1e2430a48/manager/0.log" Dec 10 15:59:13 crc kubenswrapper[4669]: I1210 15:59:13.131199 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-866l8_3daa730d-a51c-4330-8d36-712f27114f09/kube-rbac-proxy/0.log" Dec 10 15:59:13 crc kubenswrapper[4669]: I1210 15:59:13.430541 4669 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-79c8c4686c-866l8_3daa730d-a51c-4330-8d36-712f27114f09/manager/0.log" Dec 10 15:59:13 crc kubenswrapper[4669]: I1210 15:59:13.633229 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-6ln9s_5f83dee3-b4c5-4c8a-ba44-78d74195e59c/manager/0.log" Dec 10 15:59:13 crc kubenswrapper[4669]: I1210 15:59:13.664921 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-6ln9s_5f83dee3-b4c5-4c8a-ba44-78d74195e59c/kube-rbac-proxy/0.log" Dec 10 15:59:13 crc kubenswrapper[4669]: I1210 15:59:13.819191 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-8cxm8_d27eeff8-d1a3-4d08-a474-076b14194921/kube-rbac-proxy/0.log" Dec 10 15:59:13 crc kubenswrapper[4669]: I1210 15:59:13.964431 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-8cxm8_d27eeff8-d1a3-4d08-a474-076b14194921/manager/0.log" Dec 10 15:59:14 crc kubenswrapper[4669]: I1210 15:59:14.015773 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-cgdhp_e68c43d5-161b-4ab1-9592-8a2d7f32f7eb/kube-rbac-proxy/0.log" Dec 10 15:59:14 crc kubenswrapper[4669]: I1210 15:59:14.080370 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-cgdhp_e68c43d5-161b-4ab1-9592-8a2d7f32f7eb/manager/0.log" Dec 10 15:59:14 crc kubenswrapper[4669]: I1210 15:59:14.202080 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879f99k4v_b8b8c80e-24e4-40bc-9927-21ce8b6c2667/kube-rbac-proxy/0.log" Dec 10 15:59:14 crc kubenswrapper[4669]: I1210 15:59:14.266251 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-84b575879f99k4v_b8b8c80e-24e4-40bc-9927-21ce8b6c2667/manager/0.log" Dec 10 15:59:14 crc kubenswrapper[4669]: I1210 15:59:14.611135 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-b7cfc_b25ef981-69d0-4a4e-bbb8-62f7a32cefdb/registry-server/0.log" Dec 10 15:59:14 crc kubenswrapper[4669]: I1210 15:59:14.688607 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-84dffcd785-tcn6l_2fa923a3-6ffe-40a5-8130-ff8220c64847/operator/0.log" Dec 10 15:59:15 crc kubenswrapper[4669]: I1210 15:59:15.107869 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-5zbv5_35c1b18e-dac3-46c5-8714-44b5f7cc3462/manager/0.log" Dec 10 15:59:15 crc kubenswrapper[4669]: I1210 15:59:15.220396 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-565cd4c864-2bxld_1530ae38-d334-436e-9599-54f0caeaf3c4/manager/0.log" Dec 10 15:59:15 crc kubenswrapper[4669]: I1210 15:59:15.326076 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-5zbv5_35c1b18e-dac3-46c5-8714-44b5f7cc3462/kube-rbac-proxy/0.log" Dec 10 15:59:15 crc kubenswrapper[4669]: I1210 15:59:15.639968 4669 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-2x88f_812c4aed-2b51-4ae9-b36d-c3ac85d47d73/manager/0.log" Dec 10 15:59:15 crc kubenswrapper[4669]: I1210 15:59:15.649790 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-2x88f_812c4aed-2b51-4ae9-b36d-c3ac85d47d73/kube-rbac-proxy/0.log" Dec 10 15:59:15 crc kubenswrapper[4669]: I1210 15:59:15.784568 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-dg5lw_02302af5-b29e-4346-9f30-70ec3d5f8b59/operator/0.log" Dec 10 15:59:15 crc kubenswrapper[4669]: I1210 15:59:15.930526 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-n8qx2_b169689d-8a97-407f-81f7-56497bc77f0b/manager/0.log" Dec 10 15:59:15 crc kubenswrapper[4669]: I1210 15:59:15.987114 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9d58d64bc-n8qx2_b169689d-8a97-407f-81f7-56497bc77f0b/kube-rbac-proxy/0.log" Dec 10 15:59:16 crc kubenswrapper[4669]: I1210 15:59:16.176626 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-dsxdb_0afaf438-c06e-45c6-a814-d032d7a43700/kube-rbac-proxy/0.log" Dec 10 15:59:16 crc kubenswrapper[4669]: I1210 15:59:16.250614 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-57gx4_2db5775a-2728-4581-98c9-155056e55c21/kube-rbac-proxy/0.log" Dec 10 15:59:16 crc kubenswrapper[4669]: I1210 15:59:16.405717 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-58d5ff84df-57gx4_2db5775a-2728-4581-98c9-155056e55c21/manager/0.log" Dec 10 15:59:16 crc kubenswrapper[4669]: I1210 15:59:16.426116 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-dsxdb_0afaf438-c06e-45c6-a814-d032d7a43700/manager/0.log" Dec 10 15:59:16 crc kubenswrapper[4669]: I1210 15:59:16.530195 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75944c9b7-w4vkk_3e2afd98-5854-4fd7-abe6-059174bf661e/kube-rbac-proxy/0.log" Dec 10 15:59:16 crc kubenswrapper[4669]: I1210 15:59:16.561430 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-75944c9b7-w4vkk_3e2afd98-5854-4fd7-abe6-059174bf661e/manager/0.log" Dec 10 15:59:28 crc kubenswrapper[4669]: I1210 15:59:28.746763 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:59:28 crc kubenswrapper[4669]: I1210 15:59:28.747406 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:59:37 crc kubenswrapper[4669]: I1210 15:59:37.167012 4669 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-zszbg_e32e3dfe-0229-477b-8e6c-bd40314231ee/control-plane-machine-set-operator/0.log" Dec 10 15:59:37 crc kubenswrapper[4669]: I1210 15:59:37.442928 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-wvzzm_d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4/kube-rbac-proxy/0.log" Dec 10 15:59:37 crc kubenswrapper[4669]: I1210 15:59:37.473475 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-wvzzm_d3e994d0-cbc4-4a0f-ab58-ed8f5ba521b4/machine-api-operator/0.log" Dec 10 15:59:51 crc kubenswrapper[4669]: I1210 15:59:51.272126 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-rbjgt_0de15170-8813-4dc9-871f-dc6f85a01a7c/cert-manager-controller/0.log" Dec 10 15:59:51 crc kubenswrapper[4669]: I1210 15:59:51.884335 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-b99ls_4a677f57-8f30-4573-84fd-75b837e3be7d/cert-manager-cainjector/0.log" Dec 10 15:59:51 crc kubenswrapper[4669]: I1210 15:59:51.896628 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-547d8_f3df5787-f3a0-47cb-a6bc-ca2ffe0095e5/cert-manager-webhook/0.log" Dec 10 15:59:58 crc kubenswrapper[4669]: I1210 15:59:58.744799 4669 patch_prober.go:28] interesting pod/machine-config-daemon-5tqlx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 10 15:59:58 crc kubenswrapper[4669]: I1210 15:59:58.746326 4669 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 10 15:59:58 crc kubenswrapper[4669]: I1210 15:59:58.746397 4669 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" Dec 10 15:59:58 crc kubenswrapper[4669]: I1210 15:59:58.747255 4669 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7"} pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 10 15:59:58 crc kubenswrapper[4669]: I1210 15:59:58.747329 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerName="machine-config-daemon" containerID="cri-o://b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" gracePeriod=600 Dec 10 15:59:58 crc kubenswrapper[4669]: E1210 15:59:58.884285 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 15:59:58 crc kubenswrapper[4669]: I1210 15:59:58.974741 4669 generic.go:334] "Generic (PLEG): container finished" podID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" exitCode=0 Dec 10 15:59:58 crc kubenswrapper[4669]: I1210 15:59:58.974817 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerDied","Data":"b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7"} Dec 10 15:59:58 crc kubenswrapper[4669]: I1210 15:59:58.975469 4669 scope.go:117] "RemoveContainer" containerID="a5a1989b7116ed9e8655569d3bb8c3b49ea98622b68772ed51b4984f5fad6f0e" Dec 10 15:59:58 crc kubenswrapper[4669]: I1210 15:59:58.976012 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 15:59:58 crc kubenswrapper[4669]: E1210 15:59:58.976271 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.153370 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t"] Dec 10 16:00:00 crc kubenswrapper[4669]: E1210 16:00:00.153795 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066" containerName="container-00" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.153812 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066" containerName="container-00" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.154012 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50098ca-3ea9-4faf-bb4d-f3ac9a7d6066" containerName="container-00" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.154586 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.156811 4669 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.157417 4669 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.172757 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t"] Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.227578 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nx74\" (UniqueName: \"kubernetes.io/projected/cb216a75-5e6c-431f-8c1e-16bf389b318b-kube-api-access-8nx74\") pod \"collect-profiles-29423040-85z8t\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.227701 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb216a75-5e6c-431f-8c1e-16bf389b318b-secret-volume\") pod \"collect-profiles-29423040-85z8t\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.227742 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb216a75-5e6c-431f-8c1e-16bf389b318b-config-volume\") pod \"collect-profiles-29423040-85z8t\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.329594 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb216a75-5e6c-431f-8c1e-16bf389b318b-config-volume\") pod \"collect-profiles-29423040-85z8t\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.329794 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nx74\" (UniqueName: \"kubernetes.io/projected/cb216a75-5e6c-431f-8c1e-16bf389b318b-kube-api-access-8nx74\") pod \"collect-profiles-29423040-85z8t\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.329925 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb216a75-5e6c-431f-8c1e-16bf389b318b-secret-volume\") pod \"collect-profiles-29423040-85z8t\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.330588 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb216a75-5e6c-431f-8c1e-16bf389b318b-config-volume\") pod 
\"collect-profiles-29423040-85z8t\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.336189 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb216a75-5e6c-431f-8c1e-16bf389b318b-secret-volume\") pod \"collect-profiles-29423040-85z8t\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.350209 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nx74\" (UniqueName: \"kubernetes.io/projected/cb216a75-5e6c-431f-8c1e-16bf389b318b-kube-api-access-8nx74\") pod \"collect-profiles-29423040-85z8t\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:00 crc kubenswrapper[4669]: I1210 16:00:00.476389 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:01 crc kubenswrapper[4669]: I1210 16:00:01.117365 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t"] Dec 10 16:00:02 crc kubenswrapper[4669]: I1210 16:00:02.004113 4669 generic.go:334] "Generic (PLEG): container finished" podID="cb216a75-5e6c-431f-8c1e-16bf389b318b" containerID="a37a07348984412f184570257625f23c0b67f2cd3da0af36121645f422e11dc7" exitCode=0 Dec 10 16:00:02 crc kubenswrapper[4669]: I1210 16:00:02.004347 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" event={"ID":"cb216a75-5e6c-431f-8c1e-16bf389b318b","Type":"ContainerDied","Data":"a37a07348984412f184570257625f23c0b67f2cd3da0af36121645f422e11dc7"} Dec 10 16:00:02 crc kubenswrapper[4669]: I1210 16:00:02.004856 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" event={"ID":"cb216a75-5e6c-431f-8c1e-16bf389b318b","Type":"ContainerStarted","Data":"87670a4650f7f9c39a5bcd14c664af431357b68ffa836c1caebc7624483306d2"} Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.302142 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.479807 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb216a75-5e6c-431f-8c1e-16bf389b318b-secret-volume\") pod \"cb216a75-5e6c-431f-8c1e-16bf389b318b\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.479902 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb216a75-5e6c-431f-8c1e-16bf389b318b-config-volume\") pod \"cb216a75-5e6c-431f-8c1e-16bf389b318b\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.480071 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8nx74\" (UniqueName: \"kubernetes.io/projected/cb216a75-5e6c-431f-8c1e-16bf389b318b-kube-api-access-8nx74\") pod \"cb216a75-5e6c-431f-8c1e-16bf389b318b\" (UID: \"cb216a75-5e6c-431f-8c1e-16bf389b318b\") " Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.480760 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb216a75-5e6c-431f-8c1e-16bf389b318b-config-volume" (OuterVolumeSpecName: "config-volume") pod "cb216a75-5e6c-431f-8c1e-16bf389b318b" (UID: "cb216a75-5e6c-431f-8c1e-16bf389b318b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.487466 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb216a75-5e6c-431f-8c1e-16bf389b318b-kube-api-access-8nx74" (OuterVolumeSpecName: "kube-api-access-8nx74") pod "cb216a75-5e6c-431f-8c1e-16bf389b318b" (UID: "cb216a75-5e6c-431f-8c1e-16bf389b318b"). InnerVolumeSpecName "kube-api-access-8nx74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.488342 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb216a75-5e6c-431f-8c1e-16bf389b318b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cb216a75-5e6c-431f-8c1e-16bf389b318b" (UID: "cb216a75-5e6c-431f-8c1e-16bf389b318b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.583263 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8nx74\" (UniqueName: \"kubernetes.io/projected/cb216a75-5e6c-431f-8c1e-16bf389b318b-kube-api-access-8nx74\") on node \"crc\" DevicePath \"\"" Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.583294 4669 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb216a75-5e6c-431f-8c1e-16bf389b318b-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 10 16:00:03 crc kubenswrapper[4669]: I1210 16:00:03.583303 4669 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb216a75-5e6c-431f-8c1e-16bf389b318b-config-volume\") on node \"crc\" DevicePath \"\"" Dec 10 16:00:04 crc kubenswrapper[4669]: I1210 16:00:04.024991 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" event={"ID":"cb216a75-5e6c-431f-8c1e-16bf389b318b","Type":"ContainerDied","Data":"87670a4650f7f9c39a5bcd14c664af431357b68ffa836c1caebc7624483306d2"} Dec 10 16:00:04 crc kubenswrapper[4669]: I1210 16:00:04.025029 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="87670a4650f7f9c39a5bcd14c664af431357b68ffa836c1caebc7624483306d2" Dec 10 16:00:04 crc kubenswrapper[4669]: I1210 16:00:04.025074 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29423040-85z8t" Dec 10 16:00:04 crc kubenswrapper[4669]: I1210 16:00:04.382734 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr"] Dec 10 16:00:04 crc kubenswrapper[4669]: I1210 16:00:04.390552 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29422995-wlslr"] Dec 10 16:00:04 crc kubenswrapper[4669]: I1210 16:00:04.410286 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a88168e0-0728-4c47-8d89-5ece2fa293b9" path="/var/lib/kubelet/pods/a88168e0-0728-4c47-8d89-5ece2fa293b9/volumes" Dec 10 16:00:06 crc kubenswrapper[4669]: I1210 16:00:06.284185 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-cr4c8_82533b7d-e2b7-46f8-9522-e62da9f8f8dd/nmstate-console-plugin/0.log" Dec 10 16:00:06 crc kubenswrapper[4669]: I1210 16:00:06.447783 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-bh52g_19ab958b-08b5-444a-a29b-b9e787c0bd29/nmstate-handler/0.log" Dec 10 16:00:06 crc kubenswrapper[4669]: I1210 16:00:06.649734 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-9vchl_b9806903-d8e2-4131-9bf1-14d0e1c7597b/kube-rbac-proxy/0.log" Dec 10 16:00:06 crc kubenswrapper[4669]: I1210 16:00:06.663039 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-9vchl_b9806903-d8e2-4131-9bf1-14d0e1c7597b/nmstate-metrics/0.log" Dec 10 16:00:06 crc kubenswrapper[4669]: I1210 16:00:06.765481 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-ghbc7_41cd6a49-032a-4f5a-988b-1af1808b5ae5/nmstate-operator/0.log" Dec 10 16:00:06 crc kubenswrapper[4669]: I1210 16:00:06.872888 4669 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-vl7cd_05ce5aaa-1c86-4f42-add8-f24bb1789c02/nmstate-webhook/0.log" Dec 10 16:00:10 crc kubenswrapper[4669]: I1210 16:00:10.397589 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:00:10 crc kubenswrapper[4669]: E1210 16:00:10.398475 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:00:21 crc kubenswrapper[4669]: I1210 16:00:21.238619 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-5cxqp_7e1bd3e9-c78c-4b8b-84c1-b37502c9c927/kube-rbac-proxy/0.log" Dec 10 16:00:21 crc kubenswrapper[4669]: I1210 16:00:21.325835 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-5cxqp_7e1bd3e9-c78c-4b8b-84c1-b37502c9c927/controller/0.log" Dec 10 16:00:21 crc kubenswrapper[4669]: I1210 16:00:21.473989 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-frr-files/0.log" Dec 10 16:00:21 crc kubenswrapper[4669]: I1210 16:00:21.661648 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-frr-files/0.log" Dec 10 16:00:21 crc kubenswrapper[4669]: I1210 16:00:21.724359 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-reloader/0.log" Dec 10 16:00:21 crc kubenswrapper[4669]: I1210 16:00:21.764968 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-reloader/0.log" Dec 10 16:00:21 crc kubenswrapper[4669]: I1210 16:00:21.770089 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-metrics/0.log" Dec 10 16:00:21 crc kubenswrapper[4669]: I1210 16:00:21.956581 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-reloader/0.log" Dec 10 16:00:21 crc kubenswrapper[4669]: I1210 16:00:21.998384 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-metrics/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.041478 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-frr-files/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.079831 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-metrics/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.239088 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-frr-files/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.268593 4669 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-metrics/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.269278 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/cp-reloader/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.325450 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/controller/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.398349 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:00:22 crc kubenswrapper[4669]: E1210 16:00:22.398794 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.496892 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/kube-rbac-proxy/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.557959 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/frr-metrics/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.585817 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/kube-rbac-proxy-frr/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.808884 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/reloader/0.log" Dec 10 16:00:22 crc kubenswrapper[4669]: I1210 16:00:22.852761 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-nht72_e7255015-ac33-4ed6-8b27-432ef76cd293/frr-k8s-webhook-server/0.log" Dec 10 16:00:23 crc kubenswrapper[4669]: I1210 16:00:23.145119 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-7b4fb5f468-2kmvx_46153871-4c53-450c-a8ac-4cb540652173/manager/0.log" Dec 10 16:00:23 crc kubenswrapper[4669]: I1210 16:00:23.280745 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7cf4974685-slvbg_943aba10-b13d-4ba0-900b-66cdc776b921/webhook-server/0.log" Dec 10 16:00:23 crc kubenswrapper[4669]: I1210 16:00:23.338763 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-8nqwr_e98ae1e5-2f3e-4204-bd7a-e7e00438c186/frr/0.log" Dec 10 16:00:23 crc kubenswrapper[4669]: I1210 16:00:23.462155 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nvh5p_963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5/kube-rbac-proxy/0.log" Dec 10 16:00:23 crc kubenswrapper[4669]: I1210 16:00:23.832152 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nvh5p_963543ef-6eb5-4ccb-9b8b-a049dfe8b4e5/speaker/0.log" Dec 10 16:00:34 crc kubenswrapper[4669]: I1210 16:00:34.233125 4669 scope.go:117] "RemoveContainer" 
containerID="af360676c0f5d8565e7270fc744610842736e7959a8505cc46a0a9c6b6d7ca83" Dec 10 16:00:35 crc kubenswrapper[4669]: I1210 16:00:35.909401 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4_e5b4e814-6b86-460c-9e84-130e84192f88/util/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.187313 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4_e5b4e814-6b86-460c-9e84-130e84192f88/util/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.219171 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4_e5b4e814-6b86-460c-9e84-130e84192f88/pull/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.219007 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4_e5b4e814-6b86-460c-9e84-130e84192f88/pull/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.371186 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4_e5b4e814-6b86-460c-9e84-130e84192f88/pull/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.409911 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4_e5b4e814-6b86-460c-9e84-130e84192f88/util/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.410850 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fmlqp4_e5b4e814-6b86-460c-9e84-130e84192f88/extract/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.559631 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4_dcb7775d-549e-4734-9047-1a9ff0cbca3c/util/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.725553 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4_dcb7775d-549e-4734-9047-1a9ff0cbca3c/pull/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.789852 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4_dcb7775d-549e-4734-9047-1a9ff0cbca3c/pull/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.799453 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4_dcb7775d-549e-4734-9047-1a9ff0cbca3c/util/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.918993 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4_dcb7775d-549e-4734-9047-1a9ff0cbca3c/pull/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.958133 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4_dcb7775d-549e-4734-9047-1a9ff0cbca3c/util/0.log" Dec 10 16:00:36 crc kubenswrapper[4669]: I1210 16:00:36.992030 
4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f8359wh4_dcb7775d-549e-4734-9047-1a9ff0cbca3c/extract/0.log" Dec 10 16:00:37 crc kubenswrapper[4669]: I1210 16:00:37.113536 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jmngh_5a969328-820f-4d0e-9618-f40a0d6fd480/extract-utilities/0.log" Dec 10 16:00:37 crc kubenswrapper[4669]: I1210 16:00:37.396675 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jmngh_5a969328-820f-4d0e-9618-f40a0d6fd480/extract-utilities/0.log" Dec 10 16:00:37 crc kubenswrapper[4669]: I1210 16:00:37.397722 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:00:37 crc kubenswrapper[4669]: E1210 16:00:37.397946 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:00:37 crc kubenswrapper[4669]: I1210 16:00:37.406684 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jmngh_5a969328-820f-4d0e-9618-f40a0d6fd480/extract-content/0.log" Dec 10 16:00:37 crc kubenswrapper[4669]: I1210 16:00:37.422612 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jmngh_5a969328-820f-4d0e-9618-f40a0d6fd480/extract-content/0.log" Dec 10 16:00:37 crc kubenswrapper[4669]: I1210 16:00:37.585698 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jmngh_5a969328-820f-4d0e-9618-f40a0d6fd480/extract-utilities/0.log" Dec 10 16:00:37 crc kubenswrapper[4669]: I1210 16:00:37.594672 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jmngh_5a969328-820f-4d0e-9618-f40a0d6fd480/extract-content/0.log" Dec 10 16:00:37 crc kubenswrapper[4669]: I1210 16:00:37.936655 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gknnp_ebd3f28a-b015-4c75-9b7c-2bab7c397648/extract-utilities/0.log" Dec 10 16:00:37 crc kubenswrapper[4669]: I1210 16:00:37.937917 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-jmngh_5a969328-820f-4d0e-9618-f40a0d6fd480/registry-server/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.075602 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gknnp_ebd3f28a-b015-4c75-9b7c-2bab7c397648/extract-utilities/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.113759 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gknnp_ebd3f28a-b015-4c75-9b7c-2bab7c397648/extract-content/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.152291 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gknnp_ebd3f28a-b015-4c75-9b7c-2bab7c397648/extract-content/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.274982 4669 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gknnp_ebd3f28a-b015-4c75-9b7c-2bab7c397648/extract-content/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.290876 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gknnp_ebd3f28a-b015-4c75-9b7c-2bab7c397648/extract-utilities/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.531428 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-szgr8_ea027979-8f03-4b1d-862f-a7b7e72155c1/marketplace-operator/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.699116 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-gknnp_ebd3f28a-b015-4c75-9b7c-2bab7c397648/registry-server/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.714365 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zgjrg_81135d4d-3c9e-4d4e-8f6e-876541123d69/extract-utilities/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.888110 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zgjrg_81135d4d-3c9e-4d4e-8f6e-876541123d69/extract-utilities/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.920587 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zgjrg_81135d4d-3c9e-4d4e-8f6e-876541123d69/extract-content/0.log" Dec 10 16:00:38 crc kubenswrapper[4669]: I1210 16:00:38.963499 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zgjrg_81135d4d-3c9e-4d4e-8f6e-876541123d69/extract-content/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.117806 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zgjrg_81135d4d-3c9e-4d4e-8f6e-876541123d69/extract-utilities/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.147007 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zgjrg_81135d4d-3c9e-4d4e-8f6e-876541123d69/extract-content/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.239461 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-zgjrg_81135d4d-3c9e-4d4e-8f6e-876541123d69/registry-server/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.368161 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-kr84h_f1abb4ff-31b5-46e6-b036-8a998009eaeb/extract-utilities/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.552123 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-kr84h_f1abb4ff-31b5-46e6-b036-8a998009eaeb/extract-utilities/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.574775 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-kr84h_f1abb4ff-31b5-46e6-b036-8a998009eaeb/extract-content/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.591064 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-kr84h_f1abb4ff-31b5-46e6-b036-8a998009eaeb/extract-content/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.770114 4669 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_redhat-operators-kr84h_f1abb4ff-31b5-46e6-b036-8a998009eaeb/extract-content/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.774608 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-kr84h_f1abb4ff-31b5-46e6-b036-8a998009eaeb/extract-utilities/0.log" Dec 10 16:00:39 crc kubenswrapper[4669]: I1210 16:00:39.968282 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-kr84h_f1abb4ff-31b5-46e6-b036-8a998009eaeb/registry-server/0.log" Dec 10 16:00:50 crc kubenswrapper[4669]: I1210 16:00:50.399237 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:00:50 crc kubenswrapper[4669]: E1210 16:00:50.400240 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.168943 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29423041-xghcg"] Dec 10 16:01:00 crc kubenswrapper[4669]: E1210 16:01:00.169935 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb216a75-5e6c-431f-8c1e-16bf389b318b" containerName="collect-profiles" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.169952 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb216a75-5e6c-431f-8c1e-16bf389b318b" containerName="collect-profiles" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.170145 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb216a75-5e6c-431f-8c1e-16bf389b318b" containerName="collect-profiles" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.170749 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.183724 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29423041-xghcg"] Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.363037 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-fernet-keys\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.363362 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-config-data\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.363411 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-combined-ca-bundle\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.363711 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7dz5\" (UniqueName: \"kubernetes.io/projected/cab359a4-c8c4-4481-a254-468aab5abb8d-kube-api-access-z7dz5\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.465824 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7dz5\" (UniqueName: \"kubernetes.io/projected/cab359a4-c8c4-4481-a254-468aab5abb8d-kube-api-access-z7dz5\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.465913 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-fernet-keys\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.465933 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-config-data\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.465966 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-combined-ca-bundle\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.483518 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-config-data\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.483992 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-fernet-keys\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.484121 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-combined-ca-bundle\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.490944 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7dz5\" (UniqueName: \"kubernetes.io/projected/cab359a4-c8c4-4481-a254-468aab5abb8d-kube-api-access-z7dz5\") pod \"keystone-cron-29423041-xghcg\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:00 crc kubenswrapper[4669]: I1210 16:01:00.499673 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:01 crc kubenswrapper[4669]: I1210 16:01:01.078766 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29423041-xghcg"] Dec 10 16:01:01 crc kubenswrapper[4669]: I1210 16:01:01.398031 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:01:01 crc kubenswrapper[4669]: E1210 16:01:01.398601 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:01:01 crc kubenswrapper[4669]: I1210 16:01:01.559463 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29423041-xghcg" event={"ID":"cab359a4-c8c4-4481-a254-468aab5abb8d","Type":"ContainerStarted","Data":"a819331c16213501879f18411fbe7eea216877fb00cbb778fc0c18cfea4d91fd"} Dec 10 16:01:01 crc kubenswrapper[4669]: I1210 16:01:01.559767 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29423041-xghcg" event={"ID":"cab359a4-c8c4-4481-a254-468aab5abb8d","Type":"ContainerStarted","Data":"ac68f0199d2b5fff21d66cbb7de2276fce2b5ec5df6fe0f6a5438fdee8f18361"} Dec 10 16:01:01 crc kubenswrapper[4669]: I1210 16:01:01.579440 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29423041-xghcg" podStartSLOduration=1.579423944 podStartE2EDuration="1.579423944s" podCreationTimestamp="2025-12-10 16:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-10 16:01:01.57765254 +0000 UTC m=+2435.494599187" 
watchObservedRunningTime="2025-12-10 16:01:01.579423944 +0000 UTC m=+2435.496370571" Dec 10 16:01:06 crc kubenswrapper[4669]: I1210 16:01:06.605012 4669 generic.go:334] "Generic (PLEG): container finished" podID="cab359a4-c8c4-4481-a254-468aab5abb8d" containerID="a819331c16213501879f18411fbe7eea216877fb00cbb778fc0c18cfea4d91fd" exitCode=0 Dec 10 16:01:06 crc kubenswrapper[4669]: I1210 16:01:06.605256 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29423041-xghcg" event={"ID":"cab359a4-c8c4-4481-a254-468aab5abb8d","Type":"ContainerDied","Data":"a819331c16213501879f18411fbe7eea216877fb00cbb778fc0c18cfea4d91fd"} Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.025564 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.201103 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-config-data\") pod \"cab359a4-c8c4-4481-a254-468aab5abb8d\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.201144 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-fernet-keys\") pod \"cab359a4-c8c4-4481-a254-468aab5abb8d\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.201271 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7dz5\" (UniqueName: \"kubernetes.io/projected/cab359a4-c8c4-4481-a254-468aab5abb8d-kube-api-access-z7dz5\") pod \"cab359a4-c8c4-4481-a254-468aab5abb8d\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.201318 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-combined-ca-bundle\") pod \"cab359a4-c8c4-4481-a254-468aab5abb8d\" (UID: \"cab359a4-c8c4-4481-a254-468aab5abb8d\") " Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.222023 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "cab359a4-c8c4-4481-a254-468aab5abb8d" (UID: "cab359a4-c8c4-4481-a254-468aab5abb8d"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.240018 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cab359a4-c8c4-4481-a254-468aab5abb8d-kube-api-access-z7dz5" (OuterVolumeSpecName: "kube-api-access-z7dz5") pod "cab359a4-c8c4-4481-a254-468aab5abb8d" (UID: "cab359a4-c8c4-4481-a254-468aab5abb8d"). InnerVolumeSpecName "kube-api-access-z7dz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.282339 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-config-data" (OuterVolumeSpecName: "config-data") pod "cab359a4-c8c4-4481-a254-468aab5abb8d" (UID: "cab359a4-c8c4-4481-a254-468aab5abb8d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.303406 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7dz5\" (UniqueName: \"kubernetes.io/projected/cab359a4-c8c4-4481-a254-468aab5abb8d-kube-api-access-z7dz5\") on node \"crc\" DevicePath \"\"" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.303443 4669 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-config-data\") on node \"crc\" DevicePath \"\"" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.303458 4669 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.307936 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cab359a4-c8c4-4481-a254-468aab5abb8d" (UID: "cab359a4-c8c4-4481-a254-468aab5abb8d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.413325 4669 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cab359a4-c8c4-4481-a254-468aab5abb8d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.622925 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29423041-xghcg" event={"ID":"cab359a4-c8c4-4481-a254-468aab5abb8d","Type":"ContainerDied","Data":"ac68f0199d2b5fff21d66cbb7de2276fce2b5ec5df6fe0f6a5438fdee8f18361"} Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.623244 4669 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac68f0199d2b5fff21d66cbb7de2276fce2b5ec5df6fe0f6a5438fdee8f18361" Dec 10 16:01:08 crc kubenswrapper[4669]: I1210 16:01:08.623199 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29423041-xghcg" Dec 10 16:01:13 crc kubenswrapper[4669]: I1210 16:01:13.397471 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:01:13 crc kubenswrapper[4669]: E1210 16:01:13.398085 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:01:25 crc kubenswrapper[4669]: I1210 16:01:25.397900 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:01:25 crc kubenswrapper[4669]: E1210 16:01:25.398788 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:01:39 crc kubenswrapper[4669]: I1210 16:01:39.397978 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:01:39 crc kubenswrapper[4669]: E1210 16:01:39.400606 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:01:52 crc kubenswrapper[4669]: I1210 16:01:52.398771 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:01:52 crc kubenswrapper[4669]: E1210 16:01:52.399440 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:02:04 crc kubenswrapper[4669]: I1210 16:02:04.398857 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:02:04 crc kubenswrapper[4669]: E1210 16:02:04.399627 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:02:15 crc kubenswrapper[4669]: I1210 16:02:15.398585 4669 scope.go:117] "RemoveContainer" 
containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:02:15 crc kubenswrapper[4669]: E1210 16:02:15.399293 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:02:27 crc kubenswrapper[4669]: I1210 16:02:27.398083 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:02:27 crc kubenswrapper[4669]: E1210 16:02:27.399065 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:02:29 crc kubenswrapper[4669]: I1210 16:02:29.376963 4669 generic.go:334] "Generic (PLEG): container finished" podID="7adf36d6-68da-47a1-98e4-203642fff3df" containerID="ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a" exitCode=0 Dec 10 16:02:29 crc kubenswrapper[4669]: I1210 16:02:29.377034 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-dkxdz/must-gather-bclxt" event={"ID":"7adf36d6-68da-47a1-98e4-203642fff3df","Type":"ContainerDied","Data":"ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a"} Dec 10 16:02:29 crc kubenswrapper[4669]: I1210 16:02:29.377905 4669 scope.go:117] "RemoveContainer" containerID="ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a" Dec 10 16:02:29 crc kubenswrapper[4669]: I1210 16:02:29.991963 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dkxdz_must-gather-bclxt_7adf36d6-68da-47a1-98e4-203642fff3df/gather/0.log" Dec 10 16:02:37 crc kubenswrapper[4669]: I1210 16:02:37.480589 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-dkxdz/must-gather-bclxt"] Dec 10 16:02:37 crc kubenswrapper[4669]: I1210 16:02:37.481465 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-dkxdz/must-gather-bclxt" podUID="7adf36d6-68da-47a1-98e4-203642fff3df" containerName="copy" containerID="cri-o://3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73" gracePeriod=2 Dec 10 16:02:37 crc kubenswrapper[4669]: I1210 16:02:37.489949 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-dkxdz/must-gather-bclxt"] Dec 10 16:02:37 crc kubenswrapper[4669]: I1210 16:02:37.915311 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dkxdz_must-gather-bclxt_7adf36d6-68da-47a1-98e4-203642fff3df/copy/0.log" Dec 10 16:02:37 crc kubenswrapper[4669]: I1210 16:02:37.916204 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.079378 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7adf36d6-68da-47a1-98e4-203642fff3df-must-gather-output\") pod \"7adf36d6-68da-47a1-98e4-203642fff3df\" (UID: \"7adf36d6-68da-47a1-98e4-203642fff3df\") " Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.079543 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rgnvw\" (UniqueName: \"kubernetes.io/projected/7adf36d6-68da-47a1-98e4-203642fff3df-kube-api-access-rgnvw\") pod \"7adf36d6-68da-47a1-98e4-203642fff3df\" (UID: \"7adf36d6-68da-47a1-98e4-203642fff3df\") " Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.089443 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7adf36d6-68da-47a1-98e4-203642fff3df-kube-api-access-rgnvw" (OuterVolumeSpecName: "kube-api-access-rgnvw") pod "7adf36d6-68da-47a1-98e4-203642fff3df" (UID: "7adf36d6-68da-47a1-98e4-203642fff3df"). InnerVolumeSpecName "kube-api-access-rgnvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.181540 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rgnvw\" (UniqueName: \"kubernetes.io/projected/7adf36d6-68da-47a1-98e4-203642fff3df-kube-api-access-rgnvw\") on node \"crc\" DevicePath \"\"" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.230759 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7adf36d6-68da-47a1-98e4-203642fff3df-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "7adf36d6-68da-47a1-98e4-203642fff3df" (UID: "7adf36d6-68da-47a1-98e4-203642fff3df"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.283089 4669 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/7adf36d6-68da-47a1-98e4-203642fff3df-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.408042 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7adf36d6-68da-47a1-98e4-203642fff3df" path="/var/lib/kubelet/pods/7adf36d6-68da-47a1-98e4-203642fff3df/volumes" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.479155 4669 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-dkxdz_must-gather-bclxt_7adf36d6-68da-47a1-98e4-203642fff3df/copy/0.log" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.481822 4669 generic.go:334] "Generic (PLEG): container finished" podID="7adf36d6-68da-47a1-98e4-203642fff3df" containerID="3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73" exitCode=143 Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.481873 4669 scope.go:117] "RemoveContainer" containerID="3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.481986 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-dkxdz/must-gather-bclxt" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.505162 4669 scope.go:117] "RemoveContainer" containerID="ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.577805 4669 scope.go:117] "RemoveContainer" containerID="3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73" Dec 10 16:02:38 crc kubenswrapper[4669]: E1210 16:02:38.578614 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73\": container with ID starting with 3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73 not found: ID does not exist" containerID="3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.578665 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73"} err="failed to get container status \"3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73\": rpc error: code = NotFound desc = could not find container \"3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73\": container with ID starting with 3d5a0146920c1562377b91bd0617bd51b9c71ca31d5a5feaf4cdff145f6aff73 not found: ID does not exist" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.578692 4669 scope.go:117] "RemoveContainer" containerID="ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a" Dec 10 16:02:38 crc kubenswrapper[4669]: E1210 16:02:38.579101 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a\": container with ID starting with ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a not found: ID does not exist" containerID="ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a" Dec 10 16:02:38 crc kubenswrapper[4669]: I1210 16:02:38.579138 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a"} err="failed to get container status \"ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a\": rpc error: code = NotFound desc = could not find container \"ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a\": container with ID starting with ce381a7b606bea1f2ce16b8ea8a8b3536bd8c85e0e9c7c5c2e9fbed17ffc664a not found: ID does not exist" Dec 10 16:02:40 crc kubenswrapper[4669]: I1210 16:02:40.399291 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:02:40 crc kubenswrapper[4669]: E1210 16:02:40.399841 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:02:51 crc kubenswrapper[4669]: I1210 16:02:51.398763 4669 scope.go:117] "RemoveContainer" 
containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:02:51 crc kubenswrapper[4669]: E1210 16:02:51.400115 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:03:02 crc kubenswrapper[4669]: I1210 16:03:02.398190 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:03:02 crc kubenswrapper[4669]: E1210 16:03:02.398936 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:03:16 crc kubenswrapper[4669]: I1210 16:03:16.404140 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:03:16 crc kubenswrapper[4669]: E1210 16:03:16.404910 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:03:27 crc kubenswrapper[4669]: I1210 16:03:27.398831 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:03:27 crc kubenswrapper[4669]: E1210 16:03:27.399922 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:03:40 crc kubenswrapper[4669]: I1210 16:03:40.398751 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:03:40 crc kubenswrapper[4669]: E1210 16:03:40.399642 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:03:54 crc kubenswrapper[4669]: I1210 16:03:54.398248 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:03:54 crc kubenswrapper[4669]: E1210 16:03:54.398914 4669 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:04:08 crc kubenswrapper[4669]: I1210 16:04:08.398084 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:04:08 crc kubenswrapper[4669]: E1210 16:04:08.399046 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.207440 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6vvhv"] Dec 10 16:04:17 crc kubenswrapper[4669]: E1210 16:04:17.208520 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7adf36d6-68da-47a1-98e4-203642fff3df" containerName="copy" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.208540 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="7adf36d6-68da-47a1-98e4-203642fff3df" containerName="copy" Dec 10 16:04:17 crc kubenswrapper[4669]: E1210 16:04:17.208558 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7adf36d6-68da-47a1-98e4-203642fff3df" containerName="gather" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.208567 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="7adf36d6-68da-47a1-98e4-203642fff3df" containerName="gather" Dec 10 16:04:17 crc kubenswrapper[4669]: E1210 16:04:17.208578 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cab359a4-c8c4-4481-a254-468aab5abb8d" containerName="keystone-cron" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.208586 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="cab359a4-c8c4-4481-a254-468aab5abb8d" containerName="keystone-cron" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.208822 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="cab359a4-c8c4-4481-a254-468aab5abb8d" containerName="keystone-cron" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.208845 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="7adf36d6-68da-47a1-98e4-203642fff3df" containerName="copy" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.208873 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="7adf36d6-68da-47a1-98e4-203642fff3df" containerName="gather" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.213384 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.217186 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6vvhv"] Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.321759 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-utilities\") pod \"redhat-operators-6vvhv\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.321899 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-catalog-content\") pod \"redhat-operators-6vvhv\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.321986 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5xs2\" (UniqueName: \"kubernetes.io/projected/47ba682e-e065-4165-9bc4-91bf3534b177-kube-api-access-f5xs2\") pod \"redhat-operators-6vvhv\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.423958 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-catalog-content\") pod \"redhat-operators-6vvhv\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.424074 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5xs2\" (UniqueName: \"kubernetes.io/projected/47ba682e-e065-4165-9bc4-91bf3534b177-kube-api-access-f5xs2\") pod \"redhat-operators-6vvhv\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.424174 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-utilities\") pod \"redhat-operators-6vvhv\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.424720 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-catalog-content\") pod \"redhat-operators-6vvhv\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.424819 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-utilities\") pod \"redhat-operators-6vvhv\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.448251 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-f5xs2\" (UniqueName: \"kubernetes.io/projected/47ba682e-e065-4165-9bc4-91bf3534b177-kube-api-access-f5xs2\") pod \"redhat-operators-6vvhv\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:17 crc kubenswrapper[4669]: I1210 16:04:17.551373 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:18 crc kubenswrapper[4669]: I1210 16:04:18.012962 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6vvhv"] Dec 10 16:04:18 crc kubenswrapper[4669]: I1210 16:04:18.473093 4669 generic.go:334] "Generic (PLEG): container finished" podID="47ba682e-e065-4165-9bc4-91bf3534b177" containerID="b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b" exitCode=0 Dec 10 16:04:18 crc kubenswrapper[4669]: I1210 16:04:18.473143 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vvhv" event={"ID":"47ba682e-e065-4165-9bc4-91bf3534b177","Type":"ContainerDied","Data":"b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b"} Dec 10 16:04:18 crc kubenswrapper[4669]: I1210 16:04:18.473173 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vvhv" event={"ID":"47ba682e-e065-4165-9bc4-91bf3534b177","Type":"ContainerStarted","Data":"f4cf2e95a7cd8a056b8b5ca987544adb6ca9c7934497f96123ee9ac20ebe9974"} Dec 10 16:04:18 crc kubenswrapper[4669]: I1210 16:04:18.483814 4669 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 10 16:04:19 crc kubenswrapper[4669]: I1210 16:04:19.399257 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:04:19 crc kubenswrapper[4669]: E1210 16:04:19.400105 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:04:20 crc kubenswrapper[4669]: I1210 16:04:20.495022 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vvhv" event={"ID":"47ba682e-e065-4165-9bc4-91bf3534b177","Type":"ContainerStarted","Data":"cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8"} Dec 10 16:04:21 crc kubenswrapper[4669]: I1210 16:04:21.505593 4669 generic.go:334] "Generic (PLEG): container finished" podID="47ba682e-e065-4165-9bc4-91bf3534b177" containerID="cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8" exitCode=0 Dec 10 16:04:21 crc kubenswrapper[4669]: I1210 16:04:21.505810 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vvhv" event={"ID":"47ba682e-e065-4165-9bc4-91bf3534b177","Type":"ContainerDied","Data":"cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8"} Dec 10 16:04:22 crc kubenswrapper[4669]: I1210 16:04:22.518825 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vvhv" 
event={"ID":"47ba682e-e065-4165-9bc4-91bf3534b177","Type":"ContainerStarted","Data":"81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb"} Dec 10 16:04:22 crc kubenswrapper[4669]: I1210 16:04:22.548293 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6vvhv" podStartSLOduration=2.104442818 podStartE2EDuration="5.548271664s" podCreationTimestamp="2025-12-10 16:04:17 +0000 UTC" firstStartedPulling="2025-12-10 16:04:18.483323651 +0000 UTC m=+2632.400270278" lastFinishedPulling="2025-12-10 16:04:21.927152497 +0000 UTC m=+2635.844099124" observedRunningTime="2025-12-10 16:04:22.539309505 +0000 UTC m=+2636.456256152" watchObservedRunningTime="2025-12-10 16:04:22.548271664 +0000 UTC m=+2636.465218291" Dec 10 16:04:27 crc kubenswrapper[4669]: I1210 16:04:27.551931 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:27 crc kubenswrapper[4669]: I1210 16:04:27.554354 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:28 crc kubenswrapper[4669]: I1210 16:04:28.605156 4669 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6vvhv" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" containerName="registry-server" probeResult="failure" output=< Dec 10 16:04:28 crc kubenswrapper[4669]: timeout: failed to connect service ":50051" within 1s Dec 10 16:04:28 crc kubenswrapper[4669]: > Dec 10 16:04:33 crc kubenswrapper[4669]: I1210 16:04:33.398418 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:04:33 crc kubenswrapper[4669]: E1210 16:04:33.399414 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:04:37 crc kubenswrapper[4669]: I1210 16:04:37.604767 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:37 crc kubenswrapper[4669]: I1210 16:04:37.668937 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:37 crc kubenswrapper[4669]: I1210 16:04:37.863033 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6vvhv"] Dec 10 16:04:38 crc kubenswrapper[4669]: I1210 16:04:38.648816 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6vvhv" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" containerName="registry-server" containerID="cri-o://81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb" gracePeriod=2 Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.063123 4669 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.149980 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-catalog-content\") pod \"47ba682e-e065-4165-9bc4-91bf3534b177\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.150109 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-utilities\") pod \"47ba682e-e065-4165-9bc4-91bf3534b177\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.150187 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5xs2\" (UniqueName: \"kubernetes.io/projected/47ba682e-e065-4165-9bc4-91bf3534b177-kube-api-access-f5xs2\") pod \"47ba682e-e065-4165-9bc4-91bf3534b177\" (UID: \"47ba682e-e065-4165-9bc4-91bf3534b177\") " Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.151087 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-utilities" (OuterVolumeSpecName: "utilities") pod "47ba682e-e065-4165-9bc4-91bf3534b177" (UID: "47ba682e-e065-4165-9bc4-91bf3534b177"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.155783 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47ba682e-e065-4165-9bc4-91bf3534b177-kube-api-access-f5xs2" (OuterVolumeSpecName: "kube-api-access-f5xs2") pod "47ba682e-e065-4165-9bc4-91bf3534b177" (UID: "47ba682e-e065-4165-9bc4-91bf3534b177"). InnerVolumeSpecName "kube-api-access-f5xs2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.252099 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.252143 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5xs2\" (UniqueName: \"kubernetes.io/projected/47ba682e-e065-4165-9bc4-91bf3534b177-kube-api-access-f5xs2\") on node \"crc\" DevicePath \"\"" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.264781 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47ba682e-e065-4165-9bc4-91bf3534b177" (UID: "47ba682e-e065-4165-9bc4-91bf3534b177"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.354249 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47ba682e-e065-4165-9bc4-91bf3534b177-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.662588 4669 generic.go:334] "Generic (PLEG): container finished" podID="47ba682e-e065-4165-9bc4-91bf3534b177" containerID="81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb" exitCode=0 Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.662654 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6vvhv" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.662737 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vvhv" event={"ID":"47ba682e-e065-4165-9bc4-91bf3534b177","Type":"ContainerDied","Data":"81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb"} Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.663571 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6vvhv" event={"ID":"47ba682e-e065-4165-9bc4-91bf3534b177","Type":"ContainerDied","Data":"f4cf2e95a7cd8a056b8b5ca987544adb6ca9c7934497f96123ee9ac20ebe9974"} Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.663609 4669 scope.go:117] "RemoveContainer" containerID="81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.695939 4669 scope.go:117] "RemoveContainer" containerID="cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.696036 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6vvhv"] Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.707866 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6vvhv"] Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.720011 4669 scope.go:117] "RemoveContainer" containerID="b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.765036 4669 scope.go:117] "RemoveContainer" containerID="81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb" Dec 10 16:04:39 crc kubenswrapper[4669]: E1210 16:04:39.765717 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb\": container with ID starting with 81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb not found: ID does not exist" containerID="81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.765855 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb"} err="failed to get container status \"81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb\": rpc error: code = NotFound desc = could not find container \"81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb\": container with ID starting with 81665b88a822ad61423c3de63fe76908736a4c7d3e74db3ccca4073b8c2197bb not found: ID does not exist" Dec 10 16:04:39 crc 
kubenswrapper[4669]: I1210 16:04:39.765978 4669 scope.go:117] "RemoveContainer" containerID="cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8" Dec 10 16:04:39 crc kubenswrapper[4669]: E1210 16:04:39.766485 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8\": container with ID starting with cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8 not found: ID does not exist" containerID="cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.766630 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8"} err="failed to get container status \"cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8\": rpc error: code = NotFound desc = could not find container \"cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8\": container with ID starting with cb9dbc24e52e155007bd5dd39b163131819a25565346493ee9c874b3d43524b8 not found: ID does not exist" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.766721 4669 scope.go:117] "RemoveContainer" containerID="b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b" Dec 10 16:04:39 crc kubenswrapper[4669]: E1210 16:04:39.767119 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b\": container with ID starting with b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b not found: ID does not exist" containerID="b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b" Dec 10 16:04:39 crc kubenswrapper[4669]: I1210 16:04:39.767151 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b"} err="failed to get container status \"b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b\": rpc error: code = NotFound desc = could not find container \"b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b\": container with ID starting with b8f609ff049d1e2d85cffca15a7e4d105918dafd5db30ab917414d8a32a60c7b not found: ID does not exist" Dec 10 16:04:40 crc kubenswrapper[4669]: I1210 16:04:40.409895 4669 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" path="/var/lib/kubelet/pods/47ba682e-e065-4165-9bc4-91bf3534b177/volumes" Dec 10 16:04:45 crc kubenswrapper[4669]: I1210 16:04:45.397950 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:04:45 crc kubenswrapper[4669]: E1210 16:04:45.398727 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:04:58 crc kubenswrapper[4669]: I1210 16:04:58.398295 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" 
Dec 10 16:04:58 crc kubenswrapper[4669]: E1210 16:04:58.399043 4669 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-5tqlx_openshift-machine-config-operator(ce60e1d4-6433-477d-89be-6ff9354dd0a4)\"" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" podUID="ce60e1d4-6433-477d-89be-6ff9354dd0a4" Dec 10 16:05:11 crc kubenswrapper[4669]: I1210 16:05:11.398723 4669 scope.go:117] "RemoveContainer" containerID="b7e32da58f9f03dd3819ef4eb0baafb38397e07b851624de2326159accce45f7" Dec 10 16:05:11 crc kubenswrapper[4669]: I1210 16:05:11.949151 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-5tqlx" event={"ID":"ce60e1d4-6433-477d-89be-6ff9354dd0a4","Type":"ContainerStarted","Data":"2c233a60813f91764a309ad33e8f7f47b61751f6ea3ab7db50124158bea45401"} Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.118881 4669 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zqcs7"] Dec 10 16:06:39 crc kubenswrapper[4669]: E1210 16:06:39.121577 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" containerName="registry-server" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.121752 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" containerName="registry-server" Dec 10 16:06:39 crc kubenswrapper[4669]: E1210 16:06:39.122086 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" containerName="extract-utilities" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.122200 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" containerName="extract-utilities" Dec 10 16:06:39 crc kubenswrapper[4669]: E1210 16:06:39.122358 4669 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" containerName="extract-content" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.122463 4669 state_mem.go:107] "Deleted CPUSet assignment" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" containerName="extract-content" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.128495 4669 memory_manager.go:354] "RemoveStaleState removing state" podUID="47ba682e-e065-4165-9bc4-91bf3534b177" containerName="registry-server" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.130613 4669 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.141031 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zqcs7"] Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.219865 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d6nh\" (UniqueName: \"kubernetes.io/projected/35554913-80a7-4e31-a955-6157d295a0bf-kube-api-access-6d6nh\") pod \"certified-operators-zqcs7\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.220139 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-catalog-content\") pod \"certified-operators-zqcs7\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.220244 4669 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-utilities\") pod \"certified-operators-zqcs7\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.321392 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-catalog-content\") pod \"certified-operators-zqcs7\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.321443 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-utilities\") pod \"certified-operators-zqcs7\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.321603 4669 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d6nh\" (UniqueName: \"kubernetes.io/projected/35554913-80a7-4e31-a955-6157d295a0bf-kube-api-access-6d6nh\") pod \"certified-operators-zqcs7\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.322070 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-catalog-content\") pod \"certified-operators-zqcs7\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.322145 4669 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-utilities\") pod \"certified-operators-zqcs7\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.346037 4669 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6d6nh\" (UniqueName: \"kubernetes.io/projected/35554913-80a7-4e31-a955-6157d295a0bf-kube-api-access-6d6nh\") pod \"certified-operators-zqcs7\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:39 crc kubenswrapper[4669]: I1210 16:06:39.455556 4669 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:40 crc kubenswrapper[4669]: I1210 16:06:40.177158 4669 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zqcs7"] Dec 10 16:06:40 crc kubenswrapper[4669]: I1210 16:06:40.743800 4669 generic.go:334] "Generic (PLEG): container finished" podID="35554913-80a7-4e31-a955-6157d295a0bf" containerID="1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474" exitCode=0 Dec 10 16:06:40 crc kubenswrapper[4669]: I1210 16:06:40.743905 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zqcs7" event={"ID":"35554913-80a7-4e31-a955-6157d295a0bf","Type":"ContainerDied","Data":"1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474"} Dec 10 16:06:40 crc kubenswrapper[4669]: I1210 16:06:40.744120 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zqcs7" event={"ID":"35554913-80a7-4e31-a955-6157d295a0bf","Type":"ContainerStarted","Data":"35e5aa474d4b7b4383d6c597c0755a7a9017ecfd6c93c9d6b955ba0f0d9951ab"} Dec 10 16:06:43 crc kubenswrapper[4669]: I1210 16:06:43.768610 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zqcs7" event={"ID":"35554913-80a7-4e31-a955-6157d295a0bf","Type":"ContainerStarted","Data":"6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7"} Dec 10 16:06:44 crc kubenswrapper[4669]: I1210 16:06:44.808137 4669 generic.go:334] "Generic (PLEG): container finished" podID="35554913-80a7-4e31-a955-6157d295a0bf" containerID="6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7" exitCode=0 Dec 10 16:06:44 crc kubenswrapper[4669]: I1210 16:06:44.808267 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zqcs7" event={"ID":"35554913-80a7-4e31-a955-6157d295a0bf","Type":"ContainerDied","Data":"6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7"} Dec 10 16:06:46 crc kubenswrapper[4669]: I1210 16:06:46.831366 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zqcs7" event={"ID":"35554913-80a7-4e31-a955-6157d295a0bf","Type":"ContainerStarted","Data":"14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea"} Dec 10 16:06:46 crc kubenswrapper[4669]: I1210 16:06:46.851352 4669 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zqcs7" podStartSLOduration=2.246858601 podStartE2EDuration="7.851330023s" podCreationTimestamp="2025-12-10 16:06:39 +0000 UTC" firstStartedPulling="2025-12-10 16:06:40.74572119 +0000 UTC m=+2774.662667817" lastFinishedPulling="2025-12-10 16:06:46.350192602 +0000 UTC m=+2780.267139239" observedRunningTime="2025-12-10 16:06:46.845925811 +0000 UTC m=+2780.762872438" watchObservedRunningTime="2025-12-10 16:06:46.851330023 +0000 UTC m=+2780.768276660" Dec 10 16:06:49 crc kubenswrapper[4669]: I1210 16:06:49.456032 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:49 crc kubenswrapper[4669]: I1210 16:06:49.456429 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:49 crc kubenswrapper[4669]: I1210 16:06:49.523798 4669 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:59 crc kubenswrapper[4669]: I1210 16:06:59.510591 4669 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:06:59 crc kubenswrapper[4669]: I1210 16:06:59.567439 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zqcs7"] Dec 10 16:06:59 crc kubenswrapper[4669]: I1210 16:06:59.941735 4669 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zqcs7" podUID="35554913-80a7-4e31-a955-6157d295a0bf" containerName="registry-server" containerID="cri-o://14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea" gracePeriod=2 Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.412821 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.548267 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-catalog-content\") pod \"35554913-80a7-4e31-a955-6157d295a0bf\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.548365 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-utilities\") pod \"35554913-80a7-4e31-a955-6157d295a0bf\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.548435 4669 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6d6nh\" (UniqueName: \"kubernetes.io/projected/35554913-80a7-4e31-a955-6157d295a0bf-kube-api-access-6d6nh\") pod \"35554913-80a7-4e31-a955-6157d295a0bf\" (UID: \"35554913-80a7-4e31-a955-6157d295a0bf\") " Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.549359 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-utilities" (OuterVolumeSpecName: "utilities") pod "35554913-80a7-4e31-a955-6157d295a0bf" (UID: "35554913-80a7-4e31-a955-6157d295a0bf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.555166 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35554913-80a7-4e31-a955-6157d295a0bf-kube-api-access-6d6nh" (OuterVolumeSpecName: "kube-api-access-6d6nh") pod "35554913-80a7-4e31-a955-6157d295a0bf" (UID: "35554913-80a7-4e31-a955-6157d295a0bf"). InnerVolumeSpecName "kube-api-access-6d6nh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.610406 4669 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35554913-80a7-4e31-a955-6157d295a0bf" (UID: "35554913-80a7-4e31-a955-6157d295a0bf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.650341 4669 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.650885 4669 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35554913-80a7-4e31-a955-6157d295a0bf-utilities\") on node \"crc\" DevicePath \"\"" Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.650992 4669 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6d6nh\" (UniqueName: \"kubernetes.io/projected/35554913-80a7-4e31-a955-6157d295a0bf-kube-api-access-6d6nh\") on node \"crc\" DevicePath \"\"" Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.952997 4669 generic.go:334] "Generic (PLEG): container finished" podID="35554913-80a7-4e31-a955-6157d295a0bf" containerID="14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea" exitCode=0 Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.953171 4669 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zqcs7" Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.953344 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zqcs7" event={"ID":"35554913-80a7-4e31-a955-6157d295a0bf","Type":"ContainerDied","Data":"14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea"} Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.953500 4669 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zqcs7" event={"ID":"35554913-80a7-4e31-a955-6157d295a0bf","Type":"ContainerDied","Data":"35e5aa474d4b7b4383d6c597c0755a7a9017ecfd6c93c9d6b955ba0f0d9951ab"} Dec 10 16:07:00 crc kubenswrapper[4669]: I1210 16:07:00.953595 4669 scope.go:117] "RemoveContainer" containerID="14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea" Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.009467 4669 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zqcs7"] Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.019563 4669 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zqcs7"] Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.039759 4669 scope.go:117] "RemoveContainer" containerID="6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7" Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.067904 4669 scope.go:117] "RemoveContainer" containerID="1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474" Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.095027 4669 scope.go:117] "RemoveContainer" containerID="14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea" Dec 10 16:07:01 crc kubenswrapper[4669]: E1210 16:07:01.095650 4669 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea\": container with ID starting with 14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea not found: ID does not exist" containerID="14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea" Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.095800 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea"} err="failed to get container status \"14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea\": rpc error: code = NotFound desc = could not find container \"14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea\": container with ID starting with 14b5dfc976db76853474946e2a6e9f6a00aec7e77067428214c943fec03d44ea not found: ID does not exist" Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.095919 4669 scope.go:117] "RemoveContainer" containerID="6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7" Dec 10 16:07:01 crc kubenswrapper[4669]: E1210 16:07:01.096497 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7\": container with ID starting with 6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7 not found: ID does not exist" containerID="6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7" Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.096600 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7"} err="failed to get container status \"6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7\": rpc error: code = NotFound desc = could not find container \"6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7\": container with ID starting with 6fb595e4e28cdc717e20a0c91505df94cb8c16f2d6780a07f103eaa3e9daa3f7 not found: ID does not exist" Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.096729 4669 scope.go:117] "RemoveContainer" containerID="1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474" Dec 10 16:07:01 crc kubenswrapper[4669]: E1210 16:07:01.097281 4669 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474\": container with ID starting with 1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474 not found: ID does not exist" containerID="1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474" Dec 10 16:07:01 crc kubenswrapper[4669]: I1210 16:07:01.097316 4669 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474"} err="failed to get container status \"1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474\": rpc error: code = NotFound desc = could not find container \"1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474\": container with ID starting with 1883033a9813cd58748c4079bcb39c94097e77da3f6cc7a787380506f0edc474 not found: ID does not exist" Dec 10 16:07:02 crc kubenswrapper[4669]: I1210 16:07:02.408718 4669 kubelet_volumes.go:163] "Cleaned 